diff --git a/RELEASE_NOTES b/RELEASE_NOTES
index 402f9a8e9a..e622dd198b 100644
--- a/RELEASE_NOTES
+++ b/RELEASE_NOTES
@@ -4,6 +4,42 @@ For more detailed information, please see the git log.
These release notes can also be consulted at https://easybuild.readthedocs.io/en/latest/Release_notes.html.
+v4.4.2 (September 7th 2021)
+---------------------------
+
+update/bugfix release
+
+- various enhancements, including:
+ - add per-extension timing in output produced by eb command (#3734)
+ - add definition for new toolchain nvpsmpic (NVHPC + ParaStationMPI + CUDA) (#3736)
+ - include list of missing libraries in warning about missing FFTW libraries in imkl toolchain component (#3776)
+ - check for recursive symlinks by default before copying a folder (#3784)
+ - add --filter-ecs configuration option to filter out easyconfigs from set of easyconfigs to install (#3796)
+ - check type of source_tmpl value for extensions, ensure it's a string value (not a list) (#3799)
+ - also define $BLAS_SHARED_LIBS & co in build environment (analogous to $BLAS_STATIC_LIBS) (#3800)
+ - report use of --ignore-test-failure in success message in output (#3806)
+ - add get_cuda_cc_template_value method to EasyConfig class (#3807)
+ - add support for fix_bash_shebang_for (#3808)
+ - pick up $MODULES_CMD to facilitate using Environment Modules 4.x as modules tool (#3816)
+ - use more sensible branch name for creating easyblocks PR with --new-pr (#3817)
+- various bug fixes, including:
+ - remove Python 2.6 from list of supported Python versions in setup.py (#3767)
+ - don't add directory that doesn't include any files to $PATH or $LD_LIBRARY_PATH (#3769)
+ - make logdir writable also when --stop/--fetch is used and --read-only-installdir is enabled (#3771)
+ - fix forgotten renaming of 'l' to 'char' in __init__.py that is created for included Python modules (#3773)
+ - fix verify_imports by deleting all imported modules before re-importing them one by one (#3780)
+ - fix ignore_test_failure not set for Extension instances (#3782)
+ - update iompi toolchain to intel-compiler subtoolchain for oneAPI versions (>= iompi 2020.12) (#3785)
+ - don't parse patch files as easyconfigs when searching for where patch file is used (#3786)
+ - make sure git clone with a tag argument actually downloads a tag (#3795)
+ - fix CI by excluding GC3Pie 2.6.7 (which is broken with Python 2) and improve error reporting for option parsing (#3798)
+ - correctly resolve templates for patches in extensions when uploading to GitHub (#3805)
+ - add --easystack to ignored options when submitting job (#3813)
+- other changes:
+ - speed up tests by caching checked paths in set_tmpdir + less test cases for test_compiler_dependent_optarch (#3802)
+ - speed up set_parallel method in EasyBlock class (#3812)
+
+
v4.4.1 (July 6th 2021)
----------------------
diff --git a/easybuild/framework/easyblock.py b/easybuild/framework/easyblock.py
index 3b1ffa3096..58bed6afde 100644
--- a/easybuild/framework/easyblock.py
+++ b/easybuild/framework/easyblock.py
@@ -212,6 +212,10 @@ def __init__(self, ec):
self.postmsg = '' # allow a post message to be set, which can be shown as last output
self.current_step = None
+ # Create empty progress bar
+ self.progress_bar = None
+ self.pbar_task = None
+
# list of loaded modules
self.loaded_modules = []
@@ -300,6 +304,21 @@ def close_log(self):
self.log.info("Closing log for application name %s version %s" % (self.name, self.version))
fancylogger.logToFile(self.logfile, enable=False)
+ def set_progress_bar(self, progress_bar, task_id):
+ """
+ Set progress bar, the progress bar is needed when writing messages so
+ that the progress counter is always at the bottom
+ """
+ self.progress_bar = progress_bar
+ self.pbar_task = task_id
+
+ def advance_progress(self, tick=1.0):
+ """
+ Advance the progress bar forward with `tick`
+ """
+ if self.progress_bar and self.pbar_task is not None:
+ self.progress_bar.advance(self.pbar_task, tick)
+
#
# DRY RUN UTILITIES
#
@@ -593,7 +612,13 @@ def fetch_extension_sources(self, skip_checksums=False):
default_source_tmpl = resolve_template('%(name)s-%(version)s.tar.gz', template_values)
# if no sources are specified via 'sources', fall back to 'source_tmpl'
- src_fn = ext_options.get('source_tmpl', default_source_tmpl)
+ src_fn = ext_options.get('source_tmpl')
+ if src_fn is None:
+ src_fn = default_source_tmpl
+ elif not isinstance(src_fn, string_type):
+ error_msg = "source_tmpl value must be a string! (found value of type '%s'): %s"
+ raise EasyBuildError(error_msg, type(src_fn).__name__, src_fn)
+
src_path = self.obtain_file(src_fn, extension=True, urls=source_urls,
force_download=force_download)
if src_path:
@@ -1785,18 +1810,19 @@ def set_parallel(self):
"""Set 'parallel' easyconfig parameter to determine how many cores can/should be used for parallel builds."""
# set level of parallelism for build
par = build_option('parallel')
- if self.cfg['parallel'] is not None:
- if par is None:
- par = self.cfg['parallel']
- self.log.debug("Desired parallelism specified via 'parallel' easyconfig parameter: %s", par)
- else:
- par = min(int(par), int(self.cfg['parallel']))
- self.log.debug("Desired parallelism: minimum of 'parallel' build option/easyconfig parameter: %s", par)
- else:
+ cfg_par = self.cfg['parallel']
+ if cfg_par is None:
self.log.debug("Desired parallelism specified via 'parallel' build option: %s", par)
+ elif par is None:
+ par = cfg_par
+ self.log.debug("Desired parallelism specified via 'parallel' easyconfig parameter: %s", par)
+ else:
+ par = min(int(par), int(cfg_par))
+ self.log.debug("Desired parallelism: minimum of 'parallel' build option/easyconfig parameter: %s", par)
- self.cfg['parallel'] = det_parallelism(par=par, maxpar=self.cfg['maxparallel'])
- self.log.info("Setting parallelism: %s" % self.cfg['parallel'])
+ par = det_parallelism(par, maxpar=self.cfg['maxparallel'])
+ self.log.info("Setting parallelism: %s" % par)
+ self.cfg['parallel'] = par
def remove_module_file(self):
"""Remove module file (if it exists), and check for ghost installation directory (and deal with it)."""
@@ -2412,6 +2438,7 @@ def extensions_step(self, fetch=False, install=True):
tup = (ext.name, ext.version or '', idx + 1, exts_cnt)
print_msg("installing extension %s %s (%d/%d)..." % tup, silent=self.silent)
+ start_time = datetime.now()
if self.dry_run:
tup = (ext.name, ext.version, ext.__class__.__name__)
@@ -2432,11 +2459,19 @@ def extensions_step(self, fetch=False, install=True):
# real work
if install:
- ext.prerun()
- txt = ext.run()
- if txt:
- self.module_extra_extensions += txt
- ext.postrun()
+ try:
+ ext.prerun()
+ txt = ext.run()
+ if txt:
+ self.module_extra_extensions += txt
+ ext.postrun()
+ finally:
+ if not self.dry_run:
+ ext_duration = datetime.now() - start_time
+ if ext_duration.total_seconds() >= 1:
+ print_msg("\t... (took %s)", time2str(ext_duration), log=self.log, silent=self.silent)
+ elif self.logdebug or build_option('trace'):
+ print_msg("\t... (took < 1 sec)", log=self.log, silent=self.silent)
# cleanup (unload fake module, remove fake module dir)
if fake_mod_data:
@@ -2469,7 +2504,7 @@ def package_step(self):
def fix_shebang(self):
"""Fix shebang lines for specified files."""
- for lang in ['perl', 'python']:
+ for lang in ['bash', 'perl', 'python']:
shebang_regex = re.compile(r'^#![ ]*.*[/ ]%s.*' % lang)
fix_shebang_for = self.cfg['fix_%s_shebang_for' % lang]
if fix_shebang_for:
@@ -3552,6 +3587,8 @@ def run_all_steps(self, run_test_cases):
return True
steps = self.get_steps(run_test_cases=run_test_cases, iteration_count=self.det_iter_cnt())
+ # Calculate progress bar tick
+ tick = 1.0 / float(len(steps))
print_msg("building and installing %s..." % self.full_mod_name, log=self.log, silent=self.silent)
trace_msg("installation prefix: %s" % self.installdir)
@@ -3590,6 +3627,7 @@ def run_all_steps(self, run_test_cases):
print_msg("... (took %s)", time2str(step_duration), log=self.log, silent=self.silent)
elif self.logdebug or build_option('trace'):
print_msg("... (took < 1 sec)", log=self.log, silent=self.silent)
+ self.advance_progress(tick)
except StopException:
pass
@@ -3615,7 +3653,7 @@ def print_dry_run_note(loc, silent=True):
dry_run_msg(msg, silent=silent)
-def build_and_install_one(ecdict, init_env):
+def build_and_install_one(ecdict, init_env, progress_bar=None, task_id=None):
"""
Build the software
:param ecdict: dictionary contaning parsed easyconfig + metadata
@@ -3663,6 +3701,11 @@ def build_and_install_one(ecdict, init_env):
print_error("Failed to get application instance for %s (easyblock: %s): %s" % (name, easyblock, err.msg),
silent=silent)
+ # Setup progress bar
+ if progress_bar and task_id is not None:
+ app.set_progress_bar(progress_bar, task_id)
+ _log.info("Updated progress bar instance for easyblock %s", easyblock)
+
# application settings
stop = build_option('stop')
if stop is not None:
@@ -3688,8 +3731,11 @@ def build_and_install_one(ecdict, init_env):
if os.path.exists(app.installdir) and build_option('read_only_installdir') and (
build_option('rebuild') or build_option('force')):
+ enabled_write_permissions = True
# re-enable write permissions so we can install additional modules
adjust_permissions(app.installdir, stat.S_IWUSR, add=True, recursive=True)
+ else:
+ enabled_write_permissions = False
result = app.run_all_steps(run_test_cases=run_test_cases)
@@ -3697,6 +3743,9 @@ def build_and_install_one(ecdict, init_env):
# also add any extension easyblocks used during the build for reproducibility
if app.ext_instances:
copy_easyblocks_for_reprod(app.ext_instances, reprod_dir)
+ # If not already done remove the granted write permissions if we did so
+ if enabled_write_permissions and os.lstat(app.installdir)[stat.ST_MODE] & stat.S_IWUSR:
+ adjust_permissions(app.installdir, stat.S_IWUSR, add=False, recursive=True)
except EasyBuildError as err:
first_n = 300
@@ -3713,6 +3762,21 @@ def build_and_install_one(ecdict, init_env):
# successful (non-dry-run) build
if result and not dry_run:
+ def ensure_writable_log_dir(log_dir):
+ """Make sure we can write into the log dir"""
+ if build_option('read_only_installdir'):
+ # temporarily re-enable write permissions for copying log/easyconfig to install dir
+ if os.path.exists(log_dir):
+ adjust_permissions(log_dir, stat.S_IWUSR, add=True, recursive=True)
+ else:
+ parent_dir = os.path.dirname(log_dir)
+ if os.path.exists(parent_dir):
+ adjust_permissions(parent_dir, stat.S_IWUSR, add=True, recursive=False)
+ mkdir(log_dir, parents=True)
+ adjust_permissions(parent_dir, stat.S_IWUSR, add=False, recursive=False)
+ else:
+ mkdir(log_dir, parents=True)
+ adjust_permissions(log_dir, stat.S_IWUSR, add=True, recursive=True)
if app.cfg['stop']:
ended = 'STOPPED'
@@ -3720,6 +3784,7 @@ def build_and_install_one(ecdict, init_env):
new_log_dir = os.path.join(app.builddir, config.log_path(ec=app.cfg))
else:
new_log_dir = os.path.dirname(app.logfile)
+ ensure_writable_log_dir(new_log_dir)
# if we're only running the sanity check, we should not copy anything new to the installation directory
elif build_option('sanity_check_only'):
@@ -3727,14 +3792,7 @@ def build_and_install_one(ecdict, init_env):
else:
new_log_dir = os.path.join(app.installdir, config.log_path(ec=app.cfg))
- if build_option('read_only_installdir'):
- # temporarily re-enable write permissions for copying log/easyconfig to install dir
- if os.path.exists(new_log_dir):
- adjust_permissions(new_log_dir, stat.S_IWUSR, add=True, recursive=True)
- else:
- adjust_permissions(app.installdir, stat.S_IWUSR, add=True, recursive=False)
- mkdir(new_log_dir, parents=True)
- adjust_permissions(app.installdir, stat.S_IWUSR, add=False, recursive=False)
+ ensure_writable_log_dir(new_log_dir)
# collect build stats
_log.info("Collecting build stats...")
diff --git a/easybuild/framework/easyconfig/default.py b/easybuild/framework/easyconfig/default.py
index 5279332541..70571c12d5 100644
--- a/easybuild/framework/easyconfig/default.py
+++ b/easybuild/framework/easyconfig/default.py
@@ -95,6 +95,8 @@
'easybuild_version': [None, "EasyBuild-version this spec-file was written for", BUILD],
'enhance_sanity_check': [False, "Indicate that additional sanity check commands & paths should enhance "
"the existin sanity check, not replace it", BUILD],
+ 'fix_bash_shebang_for': [None, "List of files for which Bash shebang should be fixed "
+ "to '#!/usr/bin/env bash' (glob patterns supported)", BUILD],
'fix_perl_shebang_for': [None, "List of files for which Perl shebang should be fixed "
"to '#!/usr/bin/env perl' (glob patterns supported)", BUILD],
'fix_python_shebang_for': [None, "List of files for which Python shebang should be fixed "
diff --git a/easybuild/framework/easyconfig/easyconfig.py b/easybuild/framework/easyconfig/easyconfig.py
index 55948e11a2..8bc89606ce 100644
--- a/easybuild/framework/easyconfig/easyconfig.py
+++ b/easybuild/framework/easyconfig/easyconfig.py
@@ -59,7 +59,7 @@
from easybuild.framework.easyconfig.licenses import EASYCONFIG_LICENSES_DICT
from easybuild.framework.easyconfig.parser import DEPRECATED_PARAMETERS, REPLACED_PARAMETERS
from easybuild.framework.easyconfig.parser import EasyConfigParser, fetch_parameters_from_easyconfig
-from easybuild.framework.easyconfig.templates import TEMPLATE_CONSTANTS, template_constant_dict
+from easybuild.framework.easyconfig.templates import TEMPLATE_CONSTANTS, TEMPLATE_NAMES_DYNAMIC, template_constant_dict
from easybuild.tools.build_log import EasyBuildError, print_warning, print_msg
from easybuild.tools.config import GENERIC_EASYBLOCK_PKG, LOCAL_VAR_NAMING_CHECK_ERROR, LOCAL_VAR_NAMING_CHECK_LOG
from easybuild.tools.config import LOCAL_VAR_NAMING_CHECK_WARN
@@ -559,6 +559,13 @@ def disable_templating(self):
finally:
self.enable_templating = old_enable_templating
+ def __str__(self):
+ """Return a string representation of this EasyConfig instance"""
+ if self.path:
+ return '%s EasyConfig @ %s' % (self.name, self.path)
+ else:
+ return 'Raw %s EasyConfig' % self.name
+
def filename(self):
"""Determine correct filename for this easyconfig file."""
@@ -1803,6 +1810,25 @@ def asdict(self):
res[key] = value
return res
+ def get_cuda_cc_template_value(self, key):
+ """
+ Get template value based on --cuda-compute-capabilities EasyBuild configuration option
+ and cuda_compute_capabilities easyconfig parameter.
+ Returns user-friendly error message in case neither are defined,
+ or if an unknown key is used.
+ """
+ if key.startswith('cuda_') and any(x[0] == key for x in TEMPLATE_NAMES_DYNAMIC):
+ try:
+ return self.template_values[key]
+ except KeyError:
+ error_msg = "Template value '%s' is not defined!\n"
+ error_msg += "Make sure that either the --cuda-compute-capabilities EasyBuild configuration "
+ error_msg += "option is set, or that the cuda_compute_capabilities easyconfig parameter is defined."
+ raise EasyBuildError(error_msg, key)
+ else:
+ error_msg = "%s is not a template value based on --cuda-compute-capabilities/cuda_compute_capabilities"
+ raise EasyBuildError(error_msg, key)
+
def det_installversion(version, toolchain_name, toolchain_version, prefix, suffix):
"""Deprecated 'det_installversion' function, to determine exact install version, based on supplied parameters."""
diff --git a/easybuild/framework/easyconfig/tools.py b/easybuild/framework/easyconfig/tools.py
index d78ff96e7c..5f2acb9aa5 100644
--- a/easybuild/framework/easyconfig/tools.py
+++ b/easybuild/framework/easyconfig/tools.py
@@ -37,6 +37,7 @@
:author: Ward Poelmans (Ghent University)
"""
import copy
+import fnmatch
import glob
import os
import re
@@ -360,6 +361,10 @@ def det_easyconfig_paths(orig_paths):
# if no easyconfigs are specified, use all the ones touched in the PR
ec_files = [path for path in pr_files if path.endswith('.eb')]
+ filter_ecs = build_option('filter_ecs')
+ if filter_ecs:
+ ec_files = [ec for ec in ec_files
+ if not any(fnmatch.fnmatch(ec, filter_spec) for filter_spec in filter_ecs)]
if ec_files and robot_path:
ignore_subdirs = build_option('ignore_dirs')
if not build_option('consider_archived_easyconfigs'):
diff --git a/easybuild/main.py b/easybuild/main.py
index 8cbff81b41..1e5792fd0e 100644
--- a/easybuild/main.py
+++ b/easybuild/main.py
@@ -68,12 +68,15 @@
from easybuild.tools.hooks import START, END, load_hooks, run_hook
from easybuild.tools.modules import modules_tool
from easybuild.tools.options import set_up_configuration, use_color
+from easybuild.tools.output import create_progress_bar, print_checks
from easybuild.tools.robot import check_conflicts, dry_run, missing_deps, resolve_dependencies, search_easyconfigs
from easybuild.tools.package.utilities import check_pkg_support
from easybuild.tools.parallelbuild import submit_jobs
from easybuild.tools.repository.repository import init_repository
+from easybuild.tools.systemtools import check_easybuild_deps
from easybuild.tools.testing import create_test_report, overall_test_report, regtest, session_state
+
_log = None
@@ -98,24 +101,36 @@ def find_easyconfigs_by_specs(build_specs, robot_path, try_to_generate, testing=
return [(ec_file, generated)]
-def build_and_install_software(ecs, init_session_state, exit_on_failure=True):
+def build_and_install_software(ecs, init_session_state, exit_on_failure=True, progress_bar=None):
"""
Build and install software for all provided parsed easyconfig files.
:param ecs: easyconfig files to install software with
:param init_session_state: initial session state, to use in test reports
:param exit_on_failure: whether or not to exit on installation failure
+ :param progress_bar: progress bar to use to report progress
"""
# obtain a copy of the starting environment so each build can start afresh
# we shouldn't use the environment from init_session_state, since relevant env vars might have been set since
# e.g. via easyconfig.handle_allowed_system_deps
init_env = copy.deepcopy(os.environ)
+ # Initialize progress bar with overall installation task
+ if progress_bar:
+ task_id = progress_bar.add_task("", total=len(ecs))
+ else:
+ task_id = None
+
res = []
for ec in ecs:
+
+ if progress_bar:
+ progress_bar.update(task_id, description=ec['short_mod_name'])
+
ec_res = {}
try:
- (ec_res['success'], app_log, err) = build_and_install_one(ec, init_env)
+ (ec_res['success'], app_log, err) = build_and_install_one(ec, init_env, progress_bar=progress_bar,
+ task_id=task_id)
ec_res['log_file'] = app_log
if not ec_res['success']:
ec_res['err'] = EasyBuildError(err)
@@ -245,6 +260,9 @@ def main(args=None, logfile=None, do_build=None, testing=False, modtool=None):
search_easyconfigs(search_query, short=options.search_short, filename_only=options.search_filename,
terse=options.terse)
+ if options.check_eb_deps:
+ print_checks(check_easybuild_deps(modtool))
+
# GitHub options that warrant a silent cleanup & exit
if options.check_github:
check_github()
@@ -283,6 +301,7 @@ def main(args=None, logfile=None, do_build=None, testing=False, modtool=None):
# non-verbose cleanup after handling GitHub integration stuff or printing terse info
early_stop_options = [
options.add_pr_labels,
+ options.check_eb_deps,
options.check_github,
options.create_index,
options.install_github_token,
@@ -521,13 +540,20 @@ def main(args=None, logfile=None, do_build=None, testing=False, modtool=None):
if not testing or (testing and do_build):
exit_on_failure = not (options.dump_test_report or options.upload_test_report)
- ecs_with_res = build_and_install_software(ordered_ecs, init_session_state, exit_on_failure=exit_on_failure)
+ progress_bar = create_progress_bar()
+ with progress_bar:
+ ecs_with_res = build_and_install_software(ordered_ecs, init_session_state,
+ exit_on_failure=exit_on_failure,
+ progress_bar=progress_bar)
else:
ecs_with_res = [(ec, {}) for ec in ordered_ecs]
correct_builds_cnt = len([ec_res for (_, ec_res) in ecs_with_res if ec_res.get('success', False)])
overall_success = correct_builds_cnt == len(ordered_ecs)
- success_msg = "Build succeeded for %s out of %s" % (correct_builds_cnt, len(ordered_ecs))
+ success_msg = "Build succeeded "
+ if build_option('ignore_test_failure'):
+ success_msg += "(with --ignore-test-failure) "
+ success_msg += "for %s out of %s" % (correct_builds_cnt, len(ordered_ecs))
repo = init_repository(get_repository(), get_repositorypath())
repo.cleanup()
diff --git a/easybuild/toolchains/fft/fftw.py b/easybuild/toolchains/fft/fftw.py
index 4b6c32dcb9..ece375253a 100644
--- a/easybuild/toolchains/fft/fftw.py
+++ b/easybuild/toolchains/fft/fftw.py
@@ -71,7 +71,7 @@ def _set_fft_variables(self):
# TODO can these be replaced with the FFT ones?
self.variables.join('FFTW_INC_DIR', 'FFT_INC_DIR')
self.variables.join('FFTW_LIB_DIR', 'FFT_LIB_DIR')
- if 'FFT_STATIC_LIBS' in self.variables:
- self.variables.join('FFTW_STATIC_LIBS', 'FFT_STATIC_LIBS')
- if 'FFT_STATIC_LIBS_MT' in self.variables:
- self.variables.join('FFTW_STATIC_LIBS_MT', 'FFT_STATIC_LIBS_MT')
+
+ for key in ('SHARED_LIBS', 'SHARED_LIBS_MT', 'STATIC_LIBS', 'STATIC_LIBS_MT'):
+ if 'FFT_' + key in self.variables:
+ self.variables.join('FFTW_' + key, 'FFT_' + key)
diff --git a/easybuild/toolchains/nvpsmpic.py b/easybuild/toolchains/nvpsmpic.py
new file mode 100644
index 0000000000..e4120344c6
--- /dev/null
+++ b/easybuild/toolchains/nvpsmpic.py
@@ -0,0 +1,43 @@
+##
+# Copyright 2016-2021 Ghent University
+# Copyright 2016-2021 Forschungszentrum Juelich
+#
+# This file is part of EasyBuild,
+# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
+# with support of Ghent University (http://ugent.be/hpc),
+# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
+# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
+# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
+#
+# http://github.com/hpcugent/easybuild
+#
+# EasyBuild is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation v2.
+#
+# EasyBuild is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
+##
+"""
+EasyBuild support for nvpsmpic compiler toolchain (includes NVHPC and ParaStationMPI, and CUDA as dependency).
+
+:author: Damian Alvarez (Forschungszentrum Juelich)
+:author: Sebastian Achilles (Forschungszentrum Juelich)
+"""
+
+from easybuild.toolchains.nvhpc import NVHPCToolchain
+# We pull in MPI and CUDA at once so this maps nicely to HMNS
+from easybuild.toolchains.mpi.psmpi import Psmpi
+from easybuild.toolchains.compiler.cuda import Cuda
+
+
+# Order matters!
+class NVpsmpic(NVHPCToolchain, Cuda, Psmpi):
+ """Compiler toolchain with NVHPC and ParaStationMPI, with CUDA as dependency."""
+ NAME = 'nvpsmpic'
+ SUBTOOLCHAIN = NVHPCToolchain.NAME
diff --git a/easybuild/tools/config.py b/easybuild/tools/config.py
index 23f3c97f54..18902ae799 100644
--- a/easybuild/tools/config.py
+++ b/easybuild/tools/config.py
@@ -48,6 +48,12 @@
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.py2vs3 import ascii_letters, create_base_metaclass, string_type
+try:
+ import rich # noqa
+ HAVE_RICH = True
+except ImportError:
+ HAVE_RICH = False
+
_log = fancylogger.getLogger('config', fname=False)
@@ -137,6 +143,13 @@
LOCAL_VAR_NAMING_CHECKS = [LOCAL_VAR_NAMING_CHECK_ERROR, LOCAL_VAR_NAMING_CHECK_LOG, LOCAL_VAR_NAMING_CHECK_WARN]
+OUTPUT_STYLE_AUTO = 'auto'
+OUTPUT_STYLE_BASIC = 'basic'
+OUTPUT_STYLE_NO_COLOR = 'no_color'
+OUTPUT_STYLE_RICH = 'rich'
+OUTPUT_STYLES = (OUTPUT_STYLE_AUTO, OUTPUT_STYLE_BASIC, OUTPUT_STYLE_NO_COLOR, OUTPUT_STYLE_RICH)
+
+
class Singleton(ABCMeta):
"""Serves as metaclass for classes that should implement the Singleton pattern.
@@ -180,6 +193,7 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX):
'envvars_user_modules',
'extra_modules',
'filter_deps',
+ 'filter_ecs',
'filter_env_vars',
'hide_deps',
'hide_toolchains',
@@ -291,6 +305,7 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX):
'map_toolchains',
'modules_tool_version_check',
'pre_create_installdir',
+ 'show_progress_bar',
],
WARN: [
'check_ebroot_env_vars',
@@ -340,6 +355,9 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX):
DEFAULT_WAIT_ON_LOCK_INTERVAL: [
'wait_on_lock_interval',
],
+ OUTPUT_STYLE_AUTO: [
+ 'output_style',
+ ],
}
# build option that do not have a perfectly matching command line option
BUILD_OPTIONS_OTHER = {
@@ -686,6 +704,22 @@ def get_module_syntax():
return ConfigurationVariables()['module_syntax']
+def get_output_style():
+ """Return output style to use."""
+ output_style = build_option('output_style')
+
+ if output_style == OUTPUT_STYLE_AUTO:
+ if HAVE_RICH:
+ output_style = OUTPUT_STYLE_RICH
+ else:
+ output_style = OUTPUT_STYLE_BASIC
+
+ if output_style == OUTPUT_STYLE_RICH and not HAVE_RICH:
+ raise EasyBuildError("Can't use '%s' output style, Rich Python package is not available!", OUTPUT_STYLE_RICH)
+
+ return output_style
+
+
def log_file_format(return_directory=False, ec=None, date=None, timestamp=None):
"""
Return the format for the logfile or the directory
diff --git a/easybuild/tools/filetools.py b/easybuild/tools/filetools.py
index 1cb65f826f..0e4afabe1a 100644
--- a/easybuild/tools/filetools.py
+++ b/easybuild/tools/filetools.py
@@ -44,6 +44,7 @@
import hashlib
import imp
import inspect
+import itertools
import os
import re
import shutil
@@ -2340,7 +2341,28 @@ def copy_files(paths, target_path, force_in_dry_run=False, target_single_file=Fa
raise EasyBuildError("One or more files to copy should be specified!")
-def copy_dir(path, target_path, force_in_dry_run=False, dirs_exist_ok=False, **kwargs):
+def has_recursive_symlinks(path):
+ """
+ Check the given directory for recursive symlinks.
+
+ That means symlinks to folders inside the path which would cause infinite loops when traversed regularly.
+
+ :param path: Path to directory to check
+ """
+ for dirpath, dirnames, filenames in os.walk(path, followlinks=True):
+ for name in itertools.chain(dirnames, filenames):
+ fullpath = os.path.join(dirpath, name)
+ if os.path.islink(fullpath):
+ linkpath = os.path.realpath(fullpath)
+ fullpath += os.sep # To catch the case where both are equal
+ if fullpath.startswith(linkpath + os.sep):
+ _log.info("Recursive symlink detected at %s", fullpath)
+ return True
+ return False
+
+
+def copy_dir(path, target_path, force_in_dry_run=False, dirs_exist_ok=False, check_for_recursive_symlinks=True,
+ **kwargs):
"""
Copy a directory from specified location to specified location
@@ -2348,6 +2370,7 @@ def copy_dir(path, target_path, force_in_dry_run=False, dirs_exist_ok=False, **k
:param target_path: path to copy the directory to
:param force_in_dry_run: force running the command during dry run
:param dirs_exist_ok: boolean indicating whether it's OK if the target directory already exists
+ :param check_for_recursive_symlinks: if the symlinks argument is not given or False, check for recursive symlinks first
shutil.copytree is used if the target path does not exist yet;
if the target path already exists, the 'copy' function will be used to copy the contents of
@@ -2359,6 +2382,13 @@ def copy_dir(path, target_path, force_in_dry_run=False, dirs_exist_ok=False, **k
dry_run_msg("copied directory %s to %s" % (path, target_path))
else:
try:
+ if check_for_recursive_symlinks and not kwargs.get('symlinks'):
+ if has_recursive_symlinks(path):
+ raise EasyBuildError("Recursive symlinks detected in %s. "
+ "Will not try copying this unless `symlinks=True` is passed",
+ path)
+ else:
+ _log.debug("No recursive symlinks in %s", path)
if not dirs_exist_ok and os.path.exists(target_path):
raise EasyBuildError("Target location %s to copy %s to already exists", target_path, path)
@@ -2386,7 +2416,9 @@ def copy_dir(path, target_path, force_in_dry_run=False, dirs_exist_ok=False, **k
paths_to_copy = [os.path.join(path, x) for x in entries]
copy(paths_to_copy, target_path,
- force_in_dry_run=force_in_dry_run, dirs_exist_ok=dirs_exist_ok, **kwargs)
+ force_in_dry_run=force_in_dry_run, dirs_exist_ok=dirs_exist_ok,
+ check_for_recursive_symlinks=False, # Don't check again
+ **kwargs)
else:
# if dirs_exist_ok is not enabled or target directory doesn't exist, just use shutil.copytree
@@ -2471,17 +2503,24 @@ def get_source_tarball_from_git(filename, targetdir, git_config):
# compose 'git clone' command, and run it
clone_cmd = ['git', 'clone']
+ if not keep_git_dir:
+ # Speed up cloning by only fetching the most recent commit, not the whole history
+ # When we don't want to keep the .git folder there won't be a difference in the result
+ clone_cmd.extend(['--depth', '1'])
+
if tag:
clone_cmd.extend(['--branch', tag])
-
- if recursive:
- clone_cmd.append('--recursive')
+ if recursive:
+ clone_cmd.append('--recursive')
+ else:
+ # checkout is done separately below for specific commits
+ clone_cmd.append('--no-checkout')
clone_cmd.append('%s/%s.git' % (url, repo_name))
tmpdir = tempfile.mkdtemp()
cwd = change_dir(tmpdir)
- run.run_cmd(' '.join(clone_cmd), log_all=True, log_ok=False, simple=False, regexp=False)
+ run.run_cmd(' '.join(clone_cmd), log_all=True, simple=True, regexp=False)
# if a specific commit is asked for, check it out
if commit:
@@ -2489,14 +2528,40 @@ def get_source_tarball_from_git(filename, targetdir, git_config):
if recursive:
checkout_cmd.extend(['&&', 'git', 'submodule', 'update', '--init', '--recursive'])
- run.run_cmd(' '.join(checkout_cmd), log_all=True, log_ok=False, simple=False, regexp=False, path=repo_name)
+ run.run_cmd(' '.join(checkout_cmd), log_all=True, simple=True, regexp=False, path=repo_name)
+
+ elif not build_option('extended_dry_run'):
+ # If we wanted to get a tag make sure we actually got a tag and not a branch with the same name
+ # This doesn't make sense in dry-run mode as we don't have anything to check
+ cmd = 'git describe --exact-match --tags HEAD'
+ # Note: Disable logging to also disable the error handling in run_cmd
+ (out, ec) = run.run_cmd(cmd, log_ok=False, log_all=False, regexp=False, path=repo_name)
+ if ec != 0 or tag not in out.splitlines():
+ print_warning('Tag %s was not downloaded in the first try due to %s/%s containing a branch'
+ ' with the same name. You might want to alert the maintainers of %s about that issue.',
+ tag, url, repo_name, repo_name)
+ cmds = []
+
+ if not keep_git_dir:
+ # make the repo unshallow first;
+ # this is equivalent to 'git fetch --unshallow' in Git 1.8.3+
+ # (first fetch seems to do nothing, unclear why)
+ cmds.append('git fetch --depth=2147483647 && git fetch --depth=2147483647')
+
+ cmds.append('git checkout refs/tags/' + tag)
+ # Clean all untracked files, e.g. from left-over submodules
+ cmds.append('git clean --force -d -x')
+ if recursive:
+ cmds.append('git submodule update --init --recursive')
+ for cmd in cmds:
+ run.run_cmd(cmd, log_all=True, simple=True, regexp=False, path=repo_name)
# create an archive and delete the git repo directory
if keep_git_dir:
tar_cmd = ['tar', 'cfvz', targetpath, repo_name]
else:
tar_cmd = ['tar', 'cfvz', targetpath, '--exclude', '.git', repo_name]
- run.run_cmd(' '.join(tar_cmd), log_all=True, log_ok=False, simple=False, regexp=False)
+ run.run_cmd(' '.join(tar_cmd), log_all=True, simple=True, regexp=False)
# cleanup (repo_name dir does not exist in dry run mode)
change_dir(cwd)
diff --git a/easybuild/tools/github.py b/easybuild/tools/github.py
index 8259de1587..5773d39a49 100644
--- a/easybuild/tools/github.py
+++ b/easybuild/tools/github.py
@@ -34,6 +34,7 @@
import getpass
import glob
import functools
+import itertools
import os
import random
import re
@@ -839,7 +840,7 @@ def _easyconfigs_pr_common(paths, ecs, start_branch=None, pr_branch=None, start_
# copy easyconfig files to right place
target_dir = os.path.join(git_working_dir, pr_target_repo)
print_msg("copying files to %s..." % target_dir)
- file_info = COPY_FUNCTIONS[pr_target_repo](ec_paths, os.path.join(git_working_dir, pr_target_repo))
+ file_info = COPY_FUNCTIONS[pr_target_repo](ec_paths, target_dir)
# figure out commit message to use
if commit_msg:
@@ -901,6 +902,8 @@ def _easyconfigs_pr_common(paths, ecs, start_branch=None, pr_branch=None, start_
if pr_branch is None:
if ec_paths and pr_target_repo == GITHUB_EASYCONFIGS_REPO:
label = file_info['ecs'][0].name + re.sub('[.-]', '', file_info['ecs'][0].version)
+ elif pr_target_repo == GITHUB_EASYBLOCKS_REPO and paths.get('py_files'):
+ label = os.path.splitext(os.path.basename(paths['py_files'][0]))[0]
else:
label = ''.join(random.choice(ascii_letters) for _ in range(10))
pr_branch = '%s_new_pr_%s' % (time.strftime("%Y%m%d%H%M%S"), label)
@@ -1013,10 +1016,14 @@ def is_patch_for(patch_name, ec):
patches = copy.copy(ec['patches'])
- for ext in ec['exts_list']:
- if isinstance(ext, (list, tuple)) and len(ext) == 3 and isinstance(ext[2], dict):
- ext_options = ext[2]
- patches.extend(ext_options.get('patches', []))
+ with ec.disable_templating():
+ # take into account both list of extensions (via exts_list) and components (cfr. Bundle easyblock)
+ for entry in itertools.chain(ec['exts_list'], ec.get('components', [])):
+ if isinstance(entry, (list, tuple)) and len(entry) == 3 and isinstance(entry[2], dict):
+ templates = {'name': entry[0], 'version': entry[1]}
+ options = entry[2]
+ patches.extend(p[0] % templates if isinstance(p, (tuple, list)) else p % templates
+ for p in options.get('patches', []))
for patch in patches:
if isinstance(patch, (tuple, list)):
@@ -1069,21 +1076,43 @@ def find_software_name_for_patch(patch_name, ec_dirs):
soft_name = None
+ ignore_dirs = build_option('ignore_dirs')
all_ecs = []
for ec_dir in ec_dirs:
- for (dirpath, _, filenames) in os.walk(ec_dir):
+ for (dirpath, dirnames, filenames) in os.walk(ec_dir):
+ # Exclude ignored dirs
+ if ignore_dirs:
+ dirnames[:] = [i for i in dirnames if i not in ignore_dirs]
for fn in filenames:
- if fn != 'TEMPLATE.eb' and not fn.endswith('.py'):
+ # TODO: In EasyBuild 5.x only check for '*.eb' files
+ if fn != 'TEMPLATE.eb' and os.path.splitext(fn)[1] not in ('.py', '.patch'):
path = os.path.join(dirpath, fn)
rawtxt = read_file(path)
if 'patches' in rawtxt:
all_ecs.append(path)
+ # Usual patch names are <software>-<version>_fix_foo.patch
+ # So search those ECs first
+ patch_stem = os.path.splitext(patch_name)[0]
+ # Extract possible sw name and version according to above scheme
+ # Those might be the same as the whole patch stem, which is OK
+ possible_sw_name = patch_stem.split('-')[0].lower()
+ possible_sw_name_version = patch_stem.split('_')[0].lower()
+
+ def ec_key(path):
+ filename = os.path.basename(path).lower()
+ # Put files with one of those as the prefix first, then sort by name
+ return (
+ not filename.startswith(possible_sw_name_version),
+ not filename.startswith(possible_sw_name),
+ filename
+ )
+ all_ecs.sort(key=ec_key)
+
nr_of_ecs = len(all_ecs)
for idx, path in enumerate(all_ecs):
if soft_name:
break
- rawtxt = read_file(path)
try:
ecs = process_easyconfig(path, validate=False)
for ec in ecs:
diff --git a/easybuild/tools/include.py b/easybuild/tools/include.py
index 34390d632a..31aecb9997 100644
--- a/easybuild/tools/include.py
+++ b/easybuild/tools/include.py
@@ -128,15 +128,17 @@ def set_up_eb_package(parent_path, eb_pkg_name, subpkgs=None, pkg_init_body=None
def verify_imports(pymods, pypkg, from_path):
"""Verify that import of specified modules from specified package and expected location works."""
- for pymod in pymods:
- pymod_spec = '%s.%s' % (pypkg, pymod)
-
+ pymod_specs = ['%s.%s' % (pypkg, pymod) for pymod in pymods]
+ for pymod_spec in pymod_specs:
# force re-import if the specified modules was already imported;
# this is required to ensure that an easyblock that is included via --include-easyblocks-from-pr
# gets preference over one that is included via --include-easyblocks
if pymod_spec in sys.modules:
del sys.modules[pymod_spec]
+ # After all modules to be reloaded have been removed, import them again
+ # Note that removing them here may delete transitively loaded modules and not import them again
+ for pymod_spec in pymod_specs:
try:
pymod = __import__(pymod_spec, fromlist=[pypkg])
# different types of exceptions may be thrown, not only ImportErrors
@@ -180,8 +182,8 @@ def include_easyblocks(tmpdir, paths):
if not os.path.exists(target_path):
symlink(easyblock_module, target_path)
- included_ebs = [x for x in os.listdir(easyblocks_dir) if x not in ['__init__.py', 'generic']]
- included_generic_ebs = [x for x in os.listdir(os.path.join(easyblocks_dir, 'generic')) if x != '__init__.py']
+ included_ebs = sorted(x for x in os.listdir(easyblocks_dir) if x not in ['__init__.py', 'generic'])
+ included_generic_ebs = sorted(x for x in os.listdir(os.path.join(easyblocks_dir, 'generic')) if x != '__init__.py')
_log.debug("Included generic easyblocks: %s", included_generic_ebs)
_log.debug("Included software-specific easyblocks: %s", included_ebs)
diff --git a/easybuild/tools/modules.py b/easybuild/tools/modules.py
index 4ee0cd8674..0860b810d8 100644
--- a/easybuild/tools/modules.py
+++ b/easybuild/tools/modules.py
@@ -187,7 +187,6 @@ def __init__(self, mod_paths=None, testing=False):
self.cmd = env_cmd_path
self.log.debug("Set %s command via environment variable %s: %s",
self.NAME, self.COMMAND_ENVIRONMENT, self.cmd)
- # check whether paths obtained via $PATH and $LMOD_CMD are different
elif cmd_path != env_cmd_path:
self.log.debug("Different paths found for %s command '%s' via which/$PATH and $%s: %s vs %s",
self.NAME, self.COMMAND, self.COMMAND_ENVIRONMENT, cmd_path, env_cmd_path)
@@ -208,6 +207,15 @@ def __init__(self, mod_paths=None, testing=False):
self.set_and_check_version()
self.supports_depends_on = False
+ def __str__(self):
+ """String representation of this ModulesTool instance."""
+ res = self.NAME
+ if self.version:
+ res += ' ' + self.version
+ else:
+ res += ' (unknown version)'
+ return res
+
def buildstats(self):
"""Return tuple with data to be included in buildstats"""
return (self.NAME, self.cmd, self.version)
@@ -801,7 +809,7 @@ def run_module(self, *args, **kwargs):
else:
args = list(args)
- self.log.debug('Current MODULEPATH: %s' % os.environ.get('MODULEPATH', ''))
+ self.log.debug('Current MODULEPATH: %s' % os.environ.get('MODULEPATH', ''))
# restore selected original environment variables before running module command
environ = os.environ.copy()
@@ -1178,7 +1186,7 @@ def update(self):
class EnvironmentModulesC(ModulesTool):
"""Interface to (C) environment modules (modulecmd)."""
- NAME = "Environment Modules v3"
+ NAME = "Environment Modules"
COMMAND = "modulecmd"
REQ_VERSION = '3.2.10'
MAX_VERSION = '3.99'
@@ -1313,8 +1321,9 @@ def remove_module_path(self, path, set_mod_paths=True):
class EnvironmentModules(EnvironmentModulesTcl):
"""Interface to environment modules 4.0+"""
- NAME = "Environment Modules v4"
+ NAME = "Environment Modules"
COMMAND = os.path.join(os.getenv('MODULESHOME', 'MODULESHOME_NOT_DEFINED'), 'libexec', 'modulecmd.tcl')
+ COMMAND_ENVIRONMENT = 'MODULES_CMD'
REQ_VERSION = '4.0.0'
MAX_VERSION = None
 VERSION_REGEXP = r'^Modules\s+Release\s+(?P<version>\d\S*)\s'
diff --git a/easybuild/tools/options.py b/easybuild/tools/options.py
index 7dadcd415d..8ed73b66d5 100644
--- a/easybuild/tools/options.py
+++ b/easybuild/tools/options.py
@@ -69,7 +69,8 @@
from easybuild.tools.config import DEFAULT_REPOSITORY, DEFAULT_WAIT_ON_LOCK_INTERVAL, DEFAULT_WAIT_ON_LOCK_LIMIT
from easybuild.tools.config import EBROOT_ENV_VAR_ACTIONS, ERROR, FORCE_DOWNLOAD_CHOICES, GENERAL_CLASS, IGNORE
from easybuild.tools.config import JOB_DEPS_TYPE_ABORT_ON_ERROR, JOB_DEPS_TYPE_ALWAYS_RUN, LOADED_MODULES_ACTIONS
-from easybuild.tools.config import LOCAL_VAR_NAMING_CHECK_WARN, LOCAL_VAR_NAMING_CHECKS, WARN
+from easybuild.tools.config import LOCAL_VAR_NAMING_CHECK_WARN, LOCAL_VAR_NAMING_CHECKS
+from easybuild.tools.config import OUTPUT_STYLE_AUTO, OUTPUT_STYLES, WARN
from easybuild.tools.config import get_pretend_installpath, init, init_build_options, mk_full_default_path
from easybuild.tools.configobj import ConfigObj, ConfigObjError
from easybuild.tools.docs import FORMAT_TXT, FORMAT_RST
@@ -396,6 +397,9 @@ def override_options(self):
'filter-deps': ("List of dependencies that you do *not* want to install with EasyBuild, "
"because equivalent OS packages are installed. (e.g. --filter-deps=zlib,ncurses)",
'strlist', 'extend', None),
+ 'filter-ecs': ("List of easyconfigs (given as glob patterns) to *ignore* when given on command line "
+ "or auto-selected when building with --from-pr. (e.g. --filter-ecs=*intel*)",
+ 'strlist', 'extend', None),
'filter-env-vars': ("List of names of environment variables that should *not* be defined/updated by "
"module files generated by EasyBuild", 'strlist', 'extend', None),
'fixed-installdir-naming-scheme': ("Use fixed naming scheme for installation directories", None,
@@ -403,6 +407,8 @@ def override_options(self):
'force-download': ("Force re-downloading of sources and/or patches, "
"even if they are available already in source path",
'choice', 'store_or_None', DEFAULT_FORCE_DOWNLOAD, FORCE_DOWNLOAD_CHOICES),
+ 'generate-devel-module': ("Generate a develop module file, implies --force if disabled",
+ None, 'store_true', True),
'group': ("Group to be used for software installations (only verified, not set)", None, 'store', None),
'group-writable-installdir': ("Enable group write permissions on installation directory after installation",
None, 'store_true', False),
@@ -443,6 +449,9 @@ def override_options(self):
'optarch': ("Set architecture optimization, overriding native architecture optimizations",
None, 'store', None),
'output-format': ("Set output format", 'choice', 'store', FORMAT_TXT, [FORMAT_TXT, FORMAT_RST]),
+ 'output-style': ("Control output style; auto implies using Rich if available to produce rich output, "
+ "with fallback to basic colored output",
+ 'choice', 'store', OUTPUT_STYLE_AUTO, OUTPUT_STYLES),
'parallel': ("Specify (maximum) level of parallellism used during build procedure",
'int', 'store', None),
'pre-create-installdir': ("Create installation directory before submitting build jobs",
@@ -465,13 +474,12 @@ def override_options(self):
None, 'store_true', False),
'set-default-module': ("Set the generated module as default", None, 'store_true', False),
'set-gid-bit': ("Set group ID bit on newly created directories", None, 'store_true', False),
+ 'show-progress-bar': ("Show progress bar in terminal output", None, 'store_true', True),
'silence-deprecation-warnings': ("Silence specified deprecation warnings", 'strlist', 'extend', None),
- 'sticky-bit': ("Set sticky bit on newly created directories", None, 'store_true', False),
'skip-extensions': ("Skip installation of extensions", None, 'store_true', False),
'skip-test-cases': ("Skip running test cases", None, 'store_true', False, 't'),
'skip-test-step': ("Skip running the test step (e.g. unit tests)", None, 'store_true', False),
- 'generate-devel-module': ("Generate a develop module file, implies --force if disabled",
- None, 'store_true', True),
+ 'sticky-bit': ("Set sticky bit on newly created directories", None, 'store_true', False),
'sysroot': ("Location root directory of system, prefix for standard paths like /usr/lib and /usr/include",
None, 'store', None),
'trace': ("Provide more information in output to stdout on progress", None, 'store_true', False, 'T'),
@@ -613,6 +621,8 @@ def informative_options(self):
'avail-hooks': ("Show list of known hooks", None, 'store_true', False),
'avail-toolchain-opts': ("Show options for toolchain", 'str', 'store', None),
'check-conflicts': ("Check for version conflicts in dependency graphs", None, 'store_true', False),
+ 'check-eb-deps': ("Check presence and version of (required and optional) EasyBuild dependencies",
+ None, 'store_true', False),
 'dep-graph': ("Create dependency graph", None, 'store', None, {'metavar': 'depgraph.<ext>'}),
'dump-env-script': ("Dump source script to set up build environment based on toolchain/dependencies",
None, 'store_true', False),
@@ -1043,8 +1053,52 @@ def _postprocess_checks(self):
self.log.info("Checks on configuration options passed")
+ def get_cfg_opt_abs_path(self, opt_name, path):
+ """Get path value of configuration option as absolute path."""
+ if os.path.isabs(path):
+ abs_path = path
+ else:
+ abs_path = os.path.abspath(path)
+ self.log.info("Relative path value for '%s' configuration option resolved to absolute path: %s",
+ opt_name, abs_path)
+ return abs_path
+
+ def _ensure_abs_path(self, opt_name):
+ """Ensure that path value for specified configuration option is an absolute path."""
+
+ opt_val = getattr(self.options, opt_name)
+ if opt_val:
+ if isinstance(opt_val, string_type):
+ setattr(self.options, opt_name, self.get_cfg_opt_abs_path(opt_name, opt_val))
+ elif isinstance(opt_val, list):
+ abs_paths = [self.get_cfg_opt_abs_path(opt_name, p) for p in opt_val]
+ setattr(self.options, opt_name, abs_paths)
+ else:
+ error_msg = "Don't know how to ensure absolute path(s) for '%s' configuration option (value type: %s)"
+ raise EasyBuildError(error_msg, opt_name, type(opt_val))
+
def _postprocess_config(self):
"""Postprocessing of configuration options"""
+
+ # resolve relative paths for configuration options that specify a location;
+ # ensuring absolute paths for 'robot' is handled separately below,
+ # because we need to be careful with the argument pass to --robot
+ path_opt_names = ['buildpath', 'containerpath', 'git_working_dirs_path', 'installpath',
+ 'installpath_modules', 'installpath_software', 'prefix', 'packagepath',
+ 'robot_paths', 'sourcepath']
+
+ # repositorypath is a special case: only first part is a path;
+ # 2nd (optional) part is a relative subdir and should not be resolved to an absolute path!
+ repositorypath = self.options.repositorypath
+ if isinstance(repositorypath, (list, tuple)) and len(repositorypath) == 2:
+ abs_path = self.get_cfg_opt_abs_path('repositorypath', repositorypath[0])
+ self.options.repositorypath = (abs_path, repositorypath[1])
+ else:
+ path_opt_names.append('repositorypath')
+
+ for opt_name in path_opt_names:
+ self._ensure_abs_path(opt_name)
+
if self.options.prefix is not None:
# prefix applies to all paths, and repository has to be reinitialised to take new repositorypath in account
# in the legacy-style configuration, repository is initialised in configuration file itself
@@ -1087,7 +1141,7 @@ def _postprocess_config(self):
# paths specified to --robot have preference over --robot-paths
# keep both values in sync if robot is enabled, which implies enabling dependency resolver
- self.options.robot_paths = [os.path.abspath(path) for path in self.options.robot + self.options.robot_paths]
+ self.options.robot_paths = [os.path.abspath(p) for p in self.options.robot] + self.options.robot_paths
self.options.robot = self.options.robot_paths
# Update the search_paths (if any) to absolute paths
@@ -1392,7 +1446,7 @@ def parse_options(args=None, with_include=True):
eb_go = EasyBuildOptions(usage=usage, description=description, prog='eb', envvar_prefix=CONFIG_ENV_VAR_PREFIX,
go_args=eb_args, error_env_options=True, error_env_option_method=raise_easybuilderror,
with_include=with_include)
- except Exception as err:
+ except EasyBuildError as err:
raise EasyBuildError("Failed to parse configuration options: %s" % err)
return eb_go
@@ -1756,24 +1810,38 @@ def set_tmpdir(tmpdir=None, raise_error=False):
# reset to make sure tempfile picks up new temporary directory to use
tempfile.tempdir = None
- # test if temporary directory allows to execute files, warn if it doesn't
- try:
- fd, tmptest_file = tempfile.mkstemp()
- os.close(fd)
- os.chmod(tmptest_file, 0o700)
- if not run_cmd(tmptest_file, simple=True, log_ok=False, regexp=False, force_in_dry_run=True, trace=False,
- stream_output=False):
- msg = "The temporary directory (%s) does not allow to execute files. " % tempfile.gettempdir()
- msg += "This can cause problems in the build process, consider using --tmpdir."
- if raise_error:
- raise EasyBuildError(msg)
+ # cache for checked paths, via function attribute
+ executable_tmp_paths = getattr(set_tmpdir, 'executable_tmp_paths', [])
+
+ # Skip the executable check if it already succeeded for any parent folder
+ # Especially important for the unit test suite, less so for actual execution
+ if not any(current_tmpdir.startswith(path) for path in executable_tmp_paths):
+
+ # test if temporary directory allows to execute files, warn if it doesn't
+ try:
+ fd, tmptest_file = tempfile.mkstemp()
+ os.close(fd)
+ os.chmod(tmptest_file, 0o700)
+ if not run_cmd(tmptest_file, simple=True, log_ok=False, regexp=False, force_in_dry_run=True, trace=False,
+ stream_output=False):
+ msg = "The temporary directory (%s) does not allow to execute files. " % tempfile.gettempdir()
+ msg += "This can cause problems in the build process, consider using --tmpdir."
+ if raise_error:
+ raise EasyBuildError(msg)
+ else:
+ _log.warning(msg)
else:
- _log.warning(msg)
- else:
- _log.debug("Temporary directory %s allows to execute files, good!" % tempfile.gettempdir())
- os.remove(tmptest_file)
+ _log.debug("Temporary directory %s allows to execute files, good!" % tempfile.gettempdir())
- except OSError as err:
- raise EasyBuildError("Failed to test whether temporary directory allows to execute files: %s", err)
+ # Put this folder into the cache
+ executable_tmp_paths.append(current_tmpdir)
+
+ # set function attribute so we can retrieve cache later
+ set_tmpdir.executable_tmp_paths = executable_tmp_paths
+
+ os.remove(tmptest_file)
+
+ except OSError as err:
+ raise EasyBuildError("Failed to test whether temporary directory allows to execute files: %s", err)
return current_tmpdir
diff --git a/easybuild/tools/output.py b/easybuild/tools/output.py
new file mode 100644
index 0000000000..fb9ad176c4
--- /dev/null
+++ b/easybuild/tools/output.py
@@ -0,0 +1,160 @@
+# -*- coding: utf-8 -*-
+# #
+# Copyright 2021-2021 Ghent University
+#
+# This file is part of EasyBuild,
+# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
+# with support of Ghent University (http://ugent.be/hpc),
+# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
+# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
+# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
+#
+# https://github.com/easybuilders/easybuild
+#
+# EasyBuild is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation v2.
+#
+# EasyBuild is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
+# #
+"""
+Tools for controlling output to terminal produced by EasyBuild.
+
+:author: Kenneth Hoste (Ghent University)
+:author: Jørgen Nordmoen (University of Oslo)
+"""
+import random
+
+from easybuild.tools.config import OUTPUT_STYLE_RICH, build_option, get_output_style
+from easybuild.tools.py2vs3 import OrderedDict
+
+try:
+ from rich.console import Console
+ from rich.table import Table
+ from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn, TimeElapsedColumn
+except ImportError:
+ pass
+
+
+class DummyProgress(object):
+ """Shim for Rich's Progress class."""
+
+ # __enter__ and __exit__ must be implemented to allow use as context manager
+ def __enter__(self, *args, **kwargs):
+ pass
+
+ def __exit__(self, *args, **kwargs):
+ pass
+
+ # dummy implementations for methods supported by rich.progress.Progress class
+ def add_task(self, *args, **kwargs):
+ pass
+
+ def update(self, *args, **kwargs):
+ pass
+
+
+def use_rich():
+ """Return whether or not to use Rich to produce rich output."""
+ return get_output_style() == OUTPUT_STYLE_RICH
+
+
+def create_progress_bar():
+ """
+ Create progress bar to display overall progress.
+
+ Returns rich.progress.Progress instance if the Rich Python package is available,
+ or a shim DummyProgress instance otherwise.
+ """
+ if use_rich() and build_option('show_progress_bar'):
+
+ # pick random spinner, from a selected subset of available spinner (see 'python3 -m rich.spinner')
+ spinner = random.choice(('aesthetic', 'arc', 'bounce', 'dots', 'line', 'monkey', 'point', 'simpleDots'))
+
+ progress_bar = Progress(
+ SpinnerColumn(spinner),
+ "[progress.percentage]{task.percentage:>3.1f}%",
+ TextColumn("[bold blue]Installing {task.description} ({task.completed:.0f}/{task.total} done)"),
+ BarColumn(bar_width=None),
+ TimeElapsedColumn(),
+ transient=True,
+ expand=True,
+ )
+ else:
+ progress_bar = DummyProgress()
+
+ return progress_bar
+
+
+def print_checks(checks_data):
+ """Print overview of checks that were made."""
+
+ col_titles = checks_data.pop('col_titles', ('name', 'info', 'description'))
+
+ col2_label = col_titles[1]
+
+ if use_rich():
+ console = Console()
+ # don't use console.print, which causes SyntaxError in Python 2
+ console_print = getattr(console, 'print') # noqa: B009
+ console_print('')
+
+ for section in checks_data:
+ section_checks = checks_data[section]
+
+ if use_rich():
+ table = Table(title=section)
+ table.add_column(col_titles[0])
+ table.add_column(col_titles[1])
+ # only add 3rd column if there's any information to include in it
+ if any(x[1] for x in section_checks.values()):
+ table.add_column(col_titles[2])
+ else:
+ lines = [
+ '',
+ section + ':',
+ '-' * (len(section) + 1),
+ '',
+ ]
+
+ if isinstance(section_checks, OrderedDict):
+ check_names = section_checks.keys()
+ else:
+ check_names = sorted(section_checks, key=lambda x: x.lower())
+
+ if use_rich():
+ for check_name in check_names:
+ (info, descr) = checks_data[section][check_name]
+ if info is None:
+ info = ':yellow_circle: [yellow]%s?!' % col2_label
+ elif info is False:
+ info = ':cross_mark: [red]not found'
+ else:
+ info = ':white_heavy_check_mark: [green]%s' % info
+ if descr:
+ table.add_row(check_name.rstrip(':'), info, descr)
+ else:
+ table.add_row(check_name.rstrip(':'), info)
+ else:
+ for check_name in check_names:
+ (info, descr) = checks_data[section][check_name]
+ if info is None:
+ info = '(found, UNKNOWN %s)' % col2_label
+ elif info is False:
+ info = '(NOT FOUND)'
+ line = "* %s %s" % (check_name, info)
+ if descr:
+ line = line.ljust(40) + '[%s]' % descr
+ lines.append(line)
+ lines.append('')
+
+ if use_rich():
+ console_print(table)
+ else:
+ print('\n'.join(lines))
diff --git a/easybuild/tools/parallelbuild.py b/easybuild/tools/parallelbuild.py
index df151aa3a1..0621a29516 100644
--- a/easybuild/tools/parallelbuild.py
+++ b/easybuild/tools/parallelbuild.py
@@ -127,7 +127,7 @@ def submit_jobs(ordered_ecs, cmd_line_opts, testing=False, prepare_first=True):
curdir = os.getcwd()
# regex pattern for options to ignore (help options can't reach here)
- ignore_opts = re.compile('^--robot$|^--job|^--try-.*$')
+ ignore_opts = re.compile('^--robot$|^--job|^--try-.*$|^--easystack$')
# generate_cmd_line returns the options in form --longopt=value
opts = [o for o in cmd_line_opts if not ignore_opts.match(o.split('=')[0])]
diff --git a/easybuild/tools/systemtools.py b/easybuild/tools/systemtools.py
index 38fc330e65..fd9d445c54 100644
--- a/easybuild/tools/systemtools.py
+++ b/easybuild/tools/systemtools.py
@@ -43,6 +43,14 @@
from ctypes.util import find_library
from socket import gethostname
+# pkg_resources is provided by the setuptools Python package,
+# which we really want to keep as an *optional* dependency
+try:
+ import pkg_resources
+ HAVE_PKG_RESOURCES = True
+except ImportError:
+ HAVE_PKG_RESOURCES = False
+
try:
# only needed on macOS, may not be available on Linux
import ctypes.macholib.dyld
@@ -52,7 +60,7 @@
from easybuild.base import fancylogger
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import is_readable, read_file, which
-from easybuild.tools.py2vs3 import string_type
+from easybuild.tools.py2vs3 import OrderedDict, string_type
from easybuild.tools.run import run_cmd
@@ -153,6 +161,45 @@
RPM = 'rpm'
DPKG = 'dpkg'
+SYSTEM_TOOLS = {
+ '7z': "extracting sources (.iso)",
+ 'bunzip2': "decompressing sources (.bz2, .tbz, .tbz2, ...)",
+ DPKG: "checking OS dependencies (Debian, Ubuntu, ...)",
+ 'gunzip': "decompressing source files (.gz, .tgz, ...)",
+ 'make': "build tool",
+ 'patch': "applying patch files",
+ RPM: "checking OS dependencies (CentOS, RHEL, OpenSuSE, SLES, ...)",
+ 'sed': "runtime patching",
+ 'Slurm': "backend for --job (sbatch command)",
+ 'tar': "unpacking source files (.tar)",
+ 'unxz': "decompressing source files (.xz, .txz)",
+ 'unzip': "decompressing files (.zip)",
+}
+
+SYSTEM_TOOL_CMDS = {
+ 'Slurm': 'sbatch',
+}
+
+EASYBUILD_OPTIONAL_DEPENDENCIES = {
+ 'archspec': (None, "determining name of CPU microarchitecture"),
+ 'autopep8': (None, "auto-formatting for dumped easyconfigs"),
+ 'GC3Pie': ('gc3libs', "backend for --job"),
+ 'GitPython': ('git', "GitHub integration + using Git repository as easyconfigs archive"),
+ 'graphviz-python': ('gv', "rendering dependency graph with Graphviz: --dep-graph"),
+ 'keyring': (None, "storing GitHub token"),
+ 'pbs-python': ('pbs', "using Torque as --job backend"),
+ 'pep8': (None, "fallback for code style checking: --check-style, --check-contrib"),
+ 'pycodestyle': (None, "code style checking: --check-style, --check-contrib"),
+ 'pysvn': (None, "using SVN repository as easyconfigs archive"),
+ 'python-graph-core': ('pygraph.classes.digraph', "creating dependency graph: --dep-graph"),
+ 'python-graph-dot': ('pygraph.readwrite.dot', "saving dependency graph as dot file: --dep-graph"),
+ 'python-hglib': ('hglib', "using Mercurial repository as easyconfigs archive"),
+ 'requests': (None, "fallback library for downloading files"),
+ 'Rich': (None, "eb command rich terminal output"),
+ 'PyYAML': ('yaml', "easystack files and .yeb easyconfig format"),
+ 'setuptools': ('pkg_resources', "obtaining information on Python packages via pkg_resources module"),
+}
+
class SystemToolsException(Exception):
"""raised when systemtools fails"""
@@ -729,14 +776,14 @@ def check_os_dependency(dep):
return found
-def get_tool_version(tool, version_option='--version'):
+def get_tool_version(tool, version_option='--version', ignore_ec=False):
"""
Get output of running version option for specific command line tool.
Output is returned as a single-line string (newlines are replaced by '; ').
"""
out, ec = run_cmd(' '.join([tool, version_option]), simple=False, log_ok=False, force_in_dry_run=True,
trace=False, stream_output=False)
- if ec:
+ if not ignore_ec and ec:
_log.warning("Failed to determine version of %s using '%s %s': %s" % (tool, tool, version_option, out))
return UNKNOWN
else:
@@ -983,31 +1030,42 @@ def det_parallelism(par=None, maxpar=None):
Determine level of parallelism that should be used.
Default: educated guess based on # cores and 'ulimit -u' setting: min(# cores, ((ulimit -u) - 15) // 6)
"""
- if par is not None:
- if not isinstance(par, int):
+ def get_default_parallelism():
+ try:
+ # Get cache value if any
+ par = det_parallelism._default_parallelism
+ except AttributeError:
+ # No cache -> Calculate value from current system values
+ par = get_avail_core_count()
+ # check ulimit -u
+ out, ec = run_cmd('ulimit -u', force_in_dry_run=True, trace=False, stream_output=False)
try:
- par = int(par)
+ if out.startswith("unlimited"):
+ maxuserproc = 2 ** 32 - 1
+ else:
+ maxuserproc = int(out)
except ValueError as err:
- raise EasyBuildError("Specified level of parallelism '%s' is not an integer value: %s", par, err)
- else:
- par = get_avail_core_count()
- # check ulimit -u
- out, ec = run_cmd('ulimit -u', force_in_dry_run=True, trace=False, stream_output=False)
- try:
- if out.startswith("unlimited"):
- out = 2 ** 32 - 1
- maxuserproc = int(out)
+ raise EasyBuildError("Failed to determine max user processes (%s, %s): %s", ec, out, err)
# assume 6 processes per build thread + 15 overhead
- par_guess = int((maxuserproc - 15) // 6)
+ par_guess = (maxuserproc - 15) // 6
if par_guess < par:
par = par_guess
- _log.info("Limit parallel builds to %s because max user processes is %s" % (par, out))
+ _log.info("Limit parallel builds to %s because max user processes is %s", par, out)
+ # Cache value
+ det_parallelism._default_parallelism = par
+ return par
+
+ if par is None:
+ par = get_default_parallelism()
+ else:
+ try:
+ par = int(par)
except ValueError as err:
- raise EasyBuildError("Failed to determine max user processes (%s, %s): %s", ec, out, err)
+ raise EasyBuildError("Specified level of parallelism '%s' is not an integer value: %s", par, err)
if maxpar is not None and maxpar < par:
- _log.info("Limiting parallellism from %s to %s" % (par, maxpar))
- par = min(par, maxpar)
+ _log.info("Limiting parallellism from %s to %s", par, maxpar)
+ par = maxpar
return par
@@ -1100,3 +1158,100 @@ def pick_dep_version(dep_version):
raise EasyBuildError("Unknown value type for version: %s (%s), should be string value", typ, dep_version)
return result
+
+
+def det_pypkg_version(pkg_name, imported_pkg, import_name=None):
+ """Determine version of a Python package."""
+
+ version = None
+
+ if HAVE_PKG_RESOURCES:
+ if import_name:
+ try:
+ version = pkg_resources.get_distribution(import_name).version
+ except pkg_resources.DistributionNotFound as err:
+ _log.debug("%s Python package not found: %s", import_name, err)
+
+ if version is None:
+ try:
+ version = pkg_resources.get_distribution(pkg_name).version
+ except pkg_resources.DistributionNotFound as err:
+ _log.debug("%s Python package not found: %s", pkg_name, err)
+
+ if version is None and hasattr(imported_pkg, '__version__'):
+ version = imported_pkg.__version__
+
+ return version
+
+
+def check_easybuild_deps(modtool):
+ """
+ Check presence and version of required and optional EasyBuild dependencies, and report back to terminal.
+ """
+ version_regex = re.compile(r'\s(?P<version>[0-9][0-9.]+[a-z]*)')
+
+ checks_data = OrderedDict()
+
+ def extract_version(tool):
+ """Helper function to extract (only) version for specific command line tool."""
+ out = get_tool_version(tool, ignore_ec=True)
+ res = version_regex.search(out)
+ if res:
+ version = res.group('version')
+ else:
+ version = "UNKNOWN version"
+
+ return version
+
+ python_version = extract_version(sys.executable)
+
+ opt_dep_versions = {}
+ for key in EASYBUILD_OPTIONAL_DEPENDENCIES:
+
+ pkg = EASYBUILD_OPTIONAL_DEPENDENCIES[key][0]
+ if pkg is None:
+ pkg = key.lower()
+
+ try:
+ mod = __import__(pkg)
+ except ImportError:
+ mod = None
+
+ if mod:
+ dep_version = det_pypkg_version(key, mod, import_name=pkg)
+ else:
+ dep_version = False
+
+ opt_dep_versions[key] = dep_version
+
+ checks_data['col_titles'] = ('name', 'version', 'used for')
+
+ req_deps_key = "Required dependencies"
+ checks_data[req_deps_key] = OrderedDict()
+ checks_data[req_deps_key]['Python'] = (python_version, None)
+ checks_data[req_deps_key]['modules tool:'] = (str(modtool), None)
+
+ opt_deps_key = "Optional dependencies"
+ checks_data[opt_deps_key] = {}
+
+ for key in opt_dep_versions:
+ checks_data[opt_deps_key][key] = (opt_dep_versions[key], EASYBUILD_OPTIONAL_DEPENDENCIES[key][1])
+
+ sys_tools_key = "System tools"
+ checks_data[sys_tools_key] = {}
+
+ for tool in SYSTEM_TOOLS:
+ tool_info = None
+ cmd = SYSTEM_TOOL_CMDS.get(tool, tool)
+ if which(cmd):
+ version = extract_version(cmd)
+ if version.startswith('UNKNOWN'):
+ tool_info = None
+ else:
+ tool_info = version
+ else:
+ tool_info = False
+
+ checks_data[sys_tools_key][tool] = (tool_info, None)
+
+ return checks_data
diff --git a/easybuild/tools/toolchain/constants.py b/easybuild/tools/toolchain/constants.py
index 307827dd1e..b334d17ae9 100644
--- a/easybuild/tools/toolchain/constants.py
+++ b/easybuild/tools/toolchain/constants.py
@@ -30,8 +30,8 @@
"""
from easybuild.tools.variables import AbsPathList
-from easybuild.tools.toolchain.variables import LinkLibraryPaths, IncludePaths, CommandFlagList, CommaStaticLibs
-from easybuild.tools.toolchain.variables import FlagList, LibraryList
+from easybuild.tools.toolchain.variables import CommandFlagList, CommaSharedLibs, CommaStaticLibs
+from easybuild.tools.toolchain.variables import FlagList, IncludePaths, LibraryList, LinkLibraryPaths
COMPILER_VARIABLES = [
@@ -114,6 +114,10 @@
('LIBBLAS', 'BLAS libraries'),
('LIBBLAS_MT', 'multithreaded BLAS libraries'),
],
+ CommaSharedLibs: [
+ ('BLAS_SHARED_LIBS', 'Comma-separated list of shared BLAS libraries'),
+ ('BLAS_MT_SHARED_LIBS', 'Comma-separated list of shared multithreaded BLAS libraries'),
+ ],
CommaStaticLibs: [
('BLAS_STATIC_LIBS', 'Comma-separated list of static BLAS libraries'),
('BLAS_MT_STATIC_LIBS', 'Comma-separated list of static multithreaded BLAS libraries'),
@@ -132,6 +136,12 @@
('LIBLAPACK', 'LAPACK libraries'),
('LIBLAPACK_MT', 'multithreaded LAPACK libraries'),
],
+ CommaSharedLibs: [
+ ('LAPACK_SHARED_LIBS', 'Comma-separated list of shared LAPACK libraries'),
+ ('LAPACK_MT_SHARED_LIBS', 'Comma-separated list of shared multithreaded LAPACK libraries'),
+ ('BLAS_LAPACK_SHARED_LIBS', 'Comma-separated list of shared BLAS and LAPACK libraries'),
+ ('BLAS_LAPACK_MT_SHARED_LIBS', 'Comma-separated list of shared multithreaded BLAS and LAPACK libraries'),
+ ],
CommaStaticLibs: [
('LAPACK_STATIC_LIBS', 'Comma-separated list of static LAPACK libraries'),
('LAPACK_MT_STATIC_LIBS', 'Comma-separated list of static LAPACK libraries'),
@@ -166,6 +176,10 @@
('LIBSCALAPACK', 'SCALAPACK libraries'),
('LIBSCALAPACK_MT', 'multithreaded SCALAPACK libraries'),
],
+ CommaSharedLibs: [
+ ('SCALAPACK_SHARED_LIBS', 'Comma-separated list of shared SCALAPACK libraries'),
+ ('SCALAPACK_MT_SHARED_LIBS', 'Comma-separated list of shared multithreaded SCALAPACK libraries'),
+ ],
CommaStaticLibs: [
('SCALAPACK_STATIC_LIBS', 'Comma-separated list of static SCALAPACK libraries'),
('SCALAPACK_MT_STATIC_LIBS', 'Comma-separated list of static SCALAPACK libraries'),
@@ -181,6 +195,10 @@
('LIBFFT', 'FFT libraries'),
('LIBFFT_MT', 'Multithreaded FFT libraries'),
],
+ CommaSharedLibs: [
+ ('FFT_SHARED_LIBS', 'Comma-separated list of shared FFT libraries'),
+ ('FFT_SHARED_LIBS_MT', 'Comma-separated list of shared multithreaded FFT libraries'),
+ ],
CommaStaticLibs: [
('FFT_STATIC_LIBS', 'Comma-separated list of static FFT libraries'),
('FFT_STATIC_LIBS_MT', 'Comma-separated list of static multithreaded FFT libraries'),
@@ -192,6 +210,10 @@
('FFTW_LIB_DIR', 'FFTW library directory'),
('FFTW_INC_DIR', 'FFTW include directory'),
],
+ CommaSharedLibs: [
+ ('FFTW_SHARED_LIBS', 'Comma-separated list of shared FFTW libraries'),
+ ('FFTW_SHARED_LIBS_MT', 'Comma-separated list of shared multithreaded FFTW libraries'),
+ ],
CommaStaticLibs: [
('FFTW_STATIC_LIBS', 'Comma-separated list of static FFTW libraries'),
('FFTW_STATIC_LIBS_MT', 'Comma-separated list of static multithreaded FFTW libraries'),
diff --git a/easybuild/tools/toolchain/fft.py b/easybuild/tools/toolchain/fft.py
index 4facafc7c2..1e39953ba7 100644
--- a/easybuild/tools/toolchain/fft.py
+++ b/easybuild/tools/toolchain/fft.py
@@ -68,7 +68,9 @@ def _set_fft_variables(self):
if getattr(self, 'LIB_MULTITHREAD', None) is not None:
self.variables.nappend('LIBFFT_MT', self.LIB_MULTITHREAD)
+ self.variables.join('FFT_SHARED_LIBS', 'LIBFFT')
self.variables.join('FFT_STATIC_LIBS', 'LIBFFT')
+ self.variables.join('FFT_SHARED_LIBS_MT', 'LIBFFT_MT')
self.variables.join('FFT_STATIC_LIBS_MT', 'LIBFFT_MT')
for root in self.get_software_root(self.FFT_MODULE_NAME):
diff --git a/easybuild/tools/toolchain/linalg.py b/easybuild/tools/toolchain/linalg.py
index 7d27350996..2e1a46db41 100644
--- a/easybuild/tools/toolchain/linalg.py
+++ b/easybuild/tools/toolchain/linalg.py
@@ -127,7 +127,9 @@ def _set_blas_variables(self):
self.variables.nappend('LIBBLAS', self.LIB_EXTRA, position=20)
self.variables.nappend('LIBBLAS_MT', self.LIB_EXTRA, position=20)
+ self.variables.join('BLAS_SHARED_LIBS', 'LIBBLAS')
self.variables.join('BLAS_STATIC_LIBS', 'LIBBLAS')
+ self.variables.join('BLAS_MT_SHARED_LIBS', 'LIBBLAS_MT')
self.variables.join('BLAS_MT_STATIC_LIBS', 'LIBBLAS_MT')
for root in self.get_software_root(self.BLAS_MODULE_NAME):
self.variables.append_exists('BLAS_LIB_DIR', root, self.BLAS_LIB_DIR)
@@ -147,7 +149,9 @@ def _set_lapack_variables(self):
self.variables.join('LIBLAPACK_MT_ONLY', 'LIBBLAS_MT')
self.variables.join('LIBLAPACK', 'LIBBLAS')
self.variables.join('LIBLAPACK_MT', 'LIBBLAS_MT')
+ self.variables.join('LAPACK_SHARED_LIBS', 'BLAS_SHARED_LIBS')
self.variables.join('LAPACK_STATIC_LIBS', 'BLAS_STATIC_LIBS')
+ self.variables.join('LAPACK_MT_SHARED_LIBS', 'BLAS_MT_SHARED_LIBS')
self.variables.join('LAPACK_MT_STATIC_LIBS', 'BLAS_MT_STATIC_LIBS')
self.variables.join('LAPACK_LIB_DIR', 'BLAS_LIB_DIR')
self.variables.join('LAPACK_INC_DIR', 'BLAS_INC_DIR')
@@ -183,7 +187,9 @@ def _set_lapack_variables(self):
self.variables.nappend('LIBLAPACK', self.LIB_EXTRA, position=20)
self.variables.nappend('LIBLAPACK_MT', self.LIB_EXTRA, position=20)
+ self.variables.join('LAPACK_SHARED_LIBS', 'LIBLAPACK')
self.variables.join('LAPACK_STATIC_LIBS', 'LIBLAPACK')
+ self.variables.join('LAPACK_MT_SHARED_LIBS', 'LIBLAPACK_MT')
self.variables.join('LAPACK_MT_STATIC_LIBS', 'LIBLAPACK_MT')
for root in self.get_software_root(self.LAPACK_MODULE_NAME):
@@ -192,7 +198,9 @@ def _set_lapack_variables(self):
self.variables.join('BLAS_LAPACK_LIB_DIR', 'LAPACK_LIB_DIR', 'BLAS_LIB_DIR')
self.variables.join('BLAS_LAPACK_INC_DIR', 'LAPACK_INC_DIR', 'BLAS_INC_DIR')
+ self.variables.join('BLAS_LAPACK_SHARED_LIBS', 'LAPACK_SHARED_LIBS', 'BLAS_SHARED_LIBS')
self.variables.join('BLAS_LAPACK_STATIC_LIBS', 'LAPACK_STATIC_LIBS', 'BLAS_STATIC_LIBS')
+ self.variables.join('BLAS_LAPACK_MT_SHARED_LIBS', 'LAPACK_MT_SHARED_LIBS', 'BLAS_MT_SHARED_LIBS')
self.variables.join('BLAS_LAPACK_MT_STATIC_LIBS', 'LAPACK_MT_STATIC_LIBS', 'BLAS_MT_STATIC_LIBS')
# add general dependency variables
@@ -293,7 +301,9 @@ def _set_scalapack_variables(self):
self.variables.nappend('LIBSCALAPACK', self.LIB_EXTRA, position=20)
self.variables.nappend('LIBSCALAPACK_MT', self.LIB_EXTRA, position=20)
+ self.variables.join('SCALAPACK_SHARED_LIBS', 'LIBSCALAPACK')
self.variables.join('SCALAPACK_STATIC_LIBS', 'LIBSCALAPACK')
+ self.variables.join('SCALAPACK_MT_SHARED_LIBS', 'LIBSCALAPACK_MT')
self.variables.join('SCALAPACK_MT_STATIC_LIBS', 'LIBSCALAPACK_MT')
for root in self.get_software_root(self.SCALAPACK_MODULE_NAME):
self.variables.append_exists('SCALAPACK_LIB_DIR', root, self.SCALAPACK_LIB_DIR)
diff --git a/easybuild/tools/toolchain/variables.py b/easybuild/tools/toolchain/variables.py
index 30a365ad5b..6758af24de 100644
--- a/easybuild/tools/toolchain/variables.py
+++ b/easybuild/tools/toolchain/variables.py
@@ -30,6 +30,7 @@
"""
from easybuild.tools.build_log import EasyBuildError
+from easybuild.tools.systemtools import get_shared_lib_ext
from easybuild.tools.variables import StrList, AbsPathList
@@ -111,8 +112,16 @@ def change(self, separator=None, separator_begin_end=None, prefix=None, prefix_b
self.END.PREFIX = prefix_begin_end
+class CommaSharedLibs(LibraryList):
+ """Comma-separated list of shared libraries"""
+ SEPARATOR = ','
+
+ PREFIX = 'lib'
+ SUFFIX = '.' + get_shared_lib_ext()
+
+
class CommaStaticLibs(LibraryList):
- """Comma-separated list"""
+ """Comma-separated list of static libraries"""
SEPARATOR = ','
PREFIX = 'lib'
diff --git a/easybuild/tools/version.py b/easybuild/tools/version.py
index 6e9468b67c..2216e1f42d 100644
--- a/easybuild/tools/version.py
+++ b/easybuild/tools/version.py
@@ -43,7 +43,7 @@
# recent setuptools versions will *TRANSFORM* something like 'X.Y.Zdev' into 'X.Y.Z.dev0', with a warning like
# UserWarning: Normalizing '2.4.0dev' to '2.4.0.dev0'
# This causes problems further up the dependency chain...
-VERSION = LooseVersion('4.4.2.dev0')
+VERSION = LooseVersion('4.4.3.dev0')
UNKNOWN = 'UNKNOWN'
diff --git a/requirements.txt b/requirements.txt
index e5d99f3e1e..591fc502f3 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -49,7 +49,9 @@ PyYAML; python_version >= '2.7'
pycodestyle; python_version < '2.7'
flake8; python_version >= '2.7'
-GC3Pie
+# 2.6.7 uses invalid Python 2 syntax
+GC3Pie!=2.6.7; python_version < '3.0'
+GC3Pie; python_version >= '3.0'
python-graph-dot
python-hglib
requests
@@ -60,3 +62,6 @@ archspec; python_version >= '2.7'
# cryptography 3.4.0 no longer supports Python 2.7
cryptography==3.3.2; python_version == '2.7'
cryptography; python_version >= '3.5'
+
+# rich is only supported for Python 3.6+
+rich; python_version >= '3.6'
diff --git a/test/framework/config.py b/test/framework/config.py
index 490fca7485..06fc1c96c8 100644
--- a/test/framework/config.py
+++ b/test/framework/config.py
@@ -302,7 +302,7 @@ def test_generaloption_config_file(self):
self.assertEqual(install_path('mod'), installpath_modules), # via config file
self.assertEqual(source_paths(), [testpath2]) # via command line
self.assertEqual(build_path(), testpath1) # via config file
- self.assertEqual(get_repositorypath(), [os.path.join(topdir, 'ebfiles_repo'), 'somesubdir']) # via config file
+ self.assertEqual(get_repositorypath(), (os.path.join(topdir, 'ebfiles_repo'), 'somesubdir')) # via config file
# hardcoded first entry
self.assertEqual(options.robot_paths[0], '/tmp/foo')
diff --git a/test/framework/easyblock.py b/test/framework/easyblock.py
index 6eb39c05da..76cf4cb659 100644
--- a/test/framework/easyblock.py
+++ b/test/framework/easyblock.py
@@ -1052,6 +1052,33 @@ def test_init_extensions(self):
error_pattern = "ConfigureMake easyblock can not be used to install extensions"
self.assertErrorRegex(EasyBuildError, error_pattern, eb.init_ext_instances)
+ def test_extension_source_tmpl(self):
+ """Test type checking for 'source_tmpl' value of an extension."""
+ self.contents = '\n'.join([
+ "easyblock = 'ConfigureMake'",
+ "name = 'toy'",
+ "version = '0.0'",
+ "homepage = 'https://example.com'",
+ "description = 'test'",
+ "toolchain = SYSTEM",
+ "exts_list = [",
+ " ('bar', '0.0', {",
+ " 'source_tmpl': [SOURCE_TAR_GZ],",
+ " }),",
+ "]",
+ ])
+ self.writeEC()
+ eb = EasyBlock(EasyConfig(self.eb_file))
+
+ error_pattern = r"source_tmpl value must be a string! "
+ error_pattern += r"\(found value of type 'list'\): \['bar-0\.0\.tar\.gz'\]"
+ self.assertErrorRegex(EasyBuildError, error_pattern, eb.fetch_step)
+
+ self.contents = self.contents.replace("'source_tmpl': [SOURCE_TAR_GZ]", "'source_tmpl': SOURCE_TAR_GZ")
+ self.writeEC()
+ eb = EasyBlock(EasyConfig(self.eb_file))
+ eb.fetch_step()
+
def test_skip_extensions_step(self):
"""Test the skip_extensions_step"""
diff --git a/test/framework/easyconfig.py b/test/framework/easyconfig.py
index 1e5704cf68..30dda253bf 100644
--- a/test/framework/easyconfig.py
+++ b/test/framework/easyconfig.py
@@ -3052,6 +3052,10 @@ def test_template_constant_dict(self):
self.assertEqual(res, expected)
# mock get_avail_core_count which is used by set_parallel -> det_parallelism
+ try:
+ del st.det_parallelism._default_parallelism # Remove cache value
+ except AttributeError:
+ pass # Ignore if not present
orig_get_avail_core_count = st.get_avail_core_count
st.get_avail_core_count = lambda: 42
@@ -4483,6 +4487,56 @@ def test_easyconfig_import(self):
error_pattern = r"Failed to copy '.*' easyconfig parameter"
self.assertErrorRegex(EasyBuildError, error_pattern, EasyConfig, test_ec)
+ def test_get_cuda_cc_template_value(self):
+ """
+ Test getting template value based on --cuda-compute-capabilities / cuda_compute_capabilities.
+ """
+ self.contents = '\n'.join([
+ 'easyblock = "ConfigureMake"',
+ 'name = "pi"',
+ 'version = "3.14"',
+ 'homepage = "http://example.com"',
+ 'description = "test easyconfig"',
+ 'toolchain = SYSTEM',
+ ])
+ self.prep()
+ ec = EasyConfig(self.eb_file)
+
+ error_pattern = "foobar is not a template value based on --cuda-compute-capabilities/cuda_compute_capabilities"
+ self.assertErrorRegex(EasyBuildError, error_pattern, ec.get_cuda_cc_template_value, 'foobar')
+
+ error_pattern = r"Template value '%s' is not defined!\n"
+ error_pattern += r"Make sure that either the --cuda-compute-capabilities EasyBuild configuration "
+ error_pattern += "option is set, or that the cuda_compute_capabilities easyconfig parameter is defined."
+ cuda_template_values = {
+ 'cuda_compute_capabilities': '6.5,7.0',
+ 'cuda_cc_space_sep': '6.5 7.0',
+ 'cuda_cc_semicolon_sep': '6.5;7.0',
+ 'cuda_sm_comma_sep': 'sm_65,sm_70',
+ 'cuda_sm_space_sep': 'sm_65 sm_70',
+ }
+ for key in cuda_template_values:
+ self.assertErrorRegex(EasyBuildError, error_pattern % key, ec.get_cuda_cc_template_value, key)
+
+ update_build_option('cuda_compute_capabilities', ['6.5', '7.0'])
+ ec = EasyConfig(self.eb_file)
+
+ for key in cuda_template_values:
+ self.assertEqual(ec.get_cuda_cc_template_value(key), cuda_template_values[key])
+
+ update_build_option('cuda_compute_capabilities', None)
+ ec = EasyConfig(self.eb_file)
+
+ for key in cuda_template_values:
+ self.assertErrorRegex(EasyBuildError, error_pattern % key, ec.get_cuda_cc_template_value, key)
+
+ self.contents += "\ncuda_compute_capabilities = ['6.5', '7.0']"
+ self.prep()
+ ec = EasyConfig(self.eb_file)
+
+ for key in cuda_template_values:
+ self.assertEqual(ec.get_cuda_cc_template_value(key), cuda_template_values[key])
+
def suite(loader=None):
""" returns all the testcases in this module """
diff --git a/test/framework/filetools.py b/test/framework/filetools.py
index b06d68767a..77bddd0b00 100644
--- a/test/framework/filetools.py
+++ b/test/framework/filetools.py
@@ -1790,6 +1790,46 @@ def test_copy_files(self):
regex = re.compile("^copied 2 files to .*/target")
self.assertTrue(regex.match(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout))
+ def test_has_recursive_symlinks(self):
+ """Test has_recursive_symlinks function"""
+ test_folder = tempfile.mkdtemp()
+ self.assertFalse(ft.has_recursive_symlinks(test_folder))
+ # Classic loop: symlink to .
+ os.symlink('.', os.path.join(test_folder, 'self_link_dot'))
+ self.assertTrue(ft.has_recursive_symlinks(test_folder))
+ # Symlink to self
+ test_folder = tempfile.mkdtemp()
+ os.symlink('self_link', os.path.join(test_folder, 'self_link'))
+ self.assertTrue(ft.has_recursive_symlinks(test_folder))
+ # Symlink from 2 folders up
+ test_folder = tempfile.mkdtemp()
+ sub_folder = os.path.join(test_folder, 'sub1', 'sub2')
+ os.makedirs(sub_folder)
+ os.symlink(os.path.join('..', '..'), os.path.join(sub_folder, 'uplink'))
+ self.assertTrue(ft.has_recursive_symlinks(test_folder))
+ # Non-issue: Symlink to sibling folders
+ test_folder = tempfile.mkdtemp()
+ sub_folder = os.path.join(test_folder, 'sub1', 'sub2')
+ os.makedirs(sub_folder)
+ sibling_folder = os.path.join(test_folder, 'sub1', 'sibling')
+ os.mkdir(sibling_folder)
+ os.symlink('sibling', os.path.join(test_folder, 'sub1', 'sibling_link'))
+ os.symlink(os.path.join('..', 'sibling'), os.path.join(test_folder, sub_folder, 'sibling_link'))
+ self.assertFalse(ft.has_recursive_symlinks(test_folder))
+ # Tricky case: Sibling symlink to folder starting with the same name
+ os.mkdir(os.path.join(test_folder, 'sub11'))
+ os.symlink(os.path.join('..', 'sub11'), os.path.join(test_folder, 'sub1', 'trick_link'))
+ self.assertFalse(ft.has_recursive_symlinks(test_folder))
+ # Symlink cycle: sub1/cycle_2 -> sub2, sub2/cycle_1 -> sub1, ...
+ test_folder = tempfile.mkdtemp()
+ sub_folder1 = os.path.join(test_folder, 'sub1')
+ sub_folder2 = os.path.join(test_folder, 'sub2')
+ os.mkdir(sub_folder1)
+ os.mkdir(sub_folder2)
+ os.symlink(os.path.join('..', 'sub2'), os.path.join(sub_folder1, 'cycle_1'))
+ os.symlink(os.path.join('..', 'sub1'), os.path.join(sub_folder2, 'cycle_2'))
+ self.assertTrue(ft.has_recursive_symlinks(test_folder))
+
def test_copy_dir(self):
"""Test copy_dir function."""
testdir = os.path.dirname(os.path.abspath(__file__))
@@ -1861,6 +1901,15 @@ def ignore_func(_, names):
ft.mkdir(subdir)
ft.copy_dir(srcdir, target_dir, symlinks=True, dirs_exist_ok=True)
+ # Detect recursive symlinks by default instead of infinite loop during copy
+ ft.remove_dir(target_dir)
+ os.symlink('.', os.path.join(subdir, 'recursive_link'))
+ self.assertErrorRegex(EasyBuildError, 'Recursive symlinks detected', ft.copy_dir, srcdir, target_dir)
+ self.assertFalse(os.path.exists(target_dir))
+ # Ok for symlinks=True
+ ft.copy_dir(srcdir, target_dir, symlinks=True)
+ self.assertTrue(os.path.exists(target_dir))
+
# also test behaviour of copy_file under --dry-run
build_options = {
'extended_dry_run': True,
@@ -2488,62 +2537,8 @@ def test_diff_files(self):
def test_get_source_tarball_from_git(self):
"""Test get_source_tarball_from_git function."""
- git_config = {
- 'repo_name': 'testrepository',
- 'url': 'https://github.com/easybuilders',
- 'tag': 'main',
- }
target_dir = os.path.join(self.test_prefix, 'target')
- try:
- ft.get_source_tarball_from_git('test.tar.gz', target_dir, git_config)
- # (only) tarball is created in specified target dir
- self.assertTrue(os.path.isfile(os.path.join(target_dir, 'test.tar.gz')))
- self.assertEqual(os.listdir(target_dir), ['test.tar.gz'])
-
- del git_config['tag']
- git_config['commit'] = '8456f86'
- ft.get_source_tarball_from_git('test2.tar.gz', target_dir, git_config)
- self.assertTrue(os.path.isfile(os.path.join(target_dir, 'test2.tar.gz')))
- self.assertEqual(sorted(os.listdir(target_dir)), ['test.tar.gz', 'test2.tar.gz'])
-
- except EasyBuildError as err:
- if "Network is down" in str(err):
- print("Ignoring download error in test_get_source_tarball_from_git, working offline?")
- else:
- raise err
-
- git_config = {
- 'repo_name': 'testrepository',
- 'url': 'git@github.com:easybuilders',
- 'tag': 'master',
- }
- args = ['test.tar.gz', self.test_prefix, git_config]
-
- for key in ['repo_name', 'url', 'tag']:
- orig_value = git_config.pop(key)
- if key == 'tag':
- error_pattern = "Neither tag nor commit found in git_config parameter"
- else:
- error_pattern = "%s not specified in git_config parameter" % key
- self.assertErrorRegex(EasyBuildError, error_pattern, ft.get_source_tarball_from_git, *args)
- git_config[key] = orig_value
-
- git_config['commit'] = '8456f86'
- error_pattern = "Tag and commit are mutually exclusive in git_config parameter"
- self.assertErrorRegex(EasyBuildError, error_pattern, ft.get_source_tarball_from_git, *args)
- del git_config['commit']
-
- git_config['unknown'] = 'foobar'
- error_pattern = "Found one or more unexpected keys in 'git_config' specification"
- self.assertErrorRegex(EasyBuildError, error_pattern, ft.get_source_tarball_from_git, *args)
- del git_config['unknown']
-
- args[0] = 'test.txt'
- error_pattern = "git_config currently only supports filename ending in .tar.gz"
- self.assertErrorRegex(EasyBuildError, error_pattern, ft.get_source_tarball_from_git, *args)
- args[0] = 'test.tar.gz'
-
# only test in dry run mode, i.e. check which commands would be executed without actually running them
build_options = {
'extended_dry_run': True,
@@ -2553,13 +2548,10 @@ def test_get_source_tarball_from_git(self):
def run_check():
"""Helper function to run get_source_tarball_from_git & check dry run output"""
- self.mock_stdout(True)
- self.mock_stderr(True)
- res = ft.get_source_tarball_from_git('test.tar.gz', target_dir, git_config)
- stdout = self.get_stdout()
- stderr = self.get_stderr()
- self.mock_stdout(False)
- self.mock_stderr(False)
+ with self.mocked_stdout_stderr():
+ res = ft.get_source_tarball_from_git('test.tar.gz', target_dir, git_config)
+ stdout = self.get_stdout()
+ stderr = self.get_stderr()
self.assertEqual(stderr, '')
regex = re.compile(expected)
self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout))
@@ -2570,58 +2562,138 @@ def run_check():
git_config = {
'repo_name': 'testrepository',
'url': 'git@github.com:easybuilders',
- 'tag': 'master',
+ 'tag': 'tag_for_tests',
}
+ git_repo = {'git_repo': 'git@github.com:easybuilders/testrepository.git'} # Just to make the below shorter
expected = '\n'.join([
- r' running command "git clone --branch master git@github.com:easybuilders/testrepository.git"',
+ r' running command "git clone --depth 1 --branch tag_for_tests %(git_repo)s"',
r" \(in .*/tmp.*\)",
r' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"',
r" \(in .*/tmp.*\)",
- ])
+ ]) % git_repo
run_check()
git_config['recursive'] = True
expected = '\n'.join([
- r' running command "git clone --branch master --recursive git@github.com:easybuilders/testrepository.git"',
+ r' running command "git clone --depth 1 --branch tag_for_tests --recursive %(git_repo)s"',
r" \(in .*/tmp.*\)",
r' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"',
r" \(in .*/tmp.*\)",
- ])
+ ]) % git_repo
run_check()
git_config['keep_git_dir'] = True
expected = '\n'.join([
- r' running command "git clone --branch master --recursive git@github.com:easybuilders/testrepository.git"',
+ r' running command "git clone --branch tag_for_tests --recursive %(git_repo)s"',
r" \(in .*/tmp.*\)",
r' running command "tar cfvz .*/target/test.tar.gz testrepository"',
r" \(in .*/tmp.*\)",
- ])
+ ]) % git_repo
run_check()
del git_config['keep_git_dir']
del git_config['tag']
git_config['commit'] = '8456f86'
expected = '\n'.join([
- r' running command "git clone --recursive git@github.com:easybuilders/testrepository.git"',
+ r' running command "git clone --depth 1 --no-checkout %(git_repo)s"',
r" \(in .*/tmp.*\)",
r' running command "git checkout 8456f86 && git submodule update --init --recursive"',
r" \(in testrepository\)",
r' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"',
r" \(in .*/tmp.*\)",
- ])
+ ]) % git_repo
run_check()
del git_config['recursive']
expected = '\n'.join([
- r' running command "git clone git@github.com:easybuilders/testrepository.git"',
+ r' running command "git clone --depth 1 --no-checkout %(git_repo)s"',
r" \(in .*/tmp.*\)",
r' running command "git checkout 8456f86"',
r" \(in testrepository\)",
r' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"',
r" \(in .*/tmp.*\)",
- ])
+ ]) % git_repo
run_check()
+ # Test with real data.
+ init_config()
+ git_config = {
+ 'repo_name': 'testrepository',
+ 'url': 'https://github.com/easybuilders',
+ 'tag': 'branch_tag_for_test',
+ }
+
+ try:
+ res = ft.get_source_tarball_from_git('test.tar.gz', target_dir, git_config)
+ # (only) tarball is created in specified target dir
+ test_file = os.path.join(target_dir, 'test.tar.gz')
+ self.assertEqual(res, test_file)
+ self.assertTrue(os.path.isfile(test_file))
+ self.assertEqual(os.listdir(target_dir), ['test.tar.gz'])
+ # Check that we indeed downloaded the right tag
+ extracted_dir = tempfile.mkdtemp(prefix='extracted_dir')
+ extracted_repo_dir = ft.extract_file(test_file, extracted_dir, change_into_dir=False)
+ self.assertTrue(os.path.isfile(os.path.join(extracted_repo_dir, 'this-is-a-branch.txt')))
+ os.remove(test_file)
+
+ # use a tag that clashes with a branch name and make sure this is handled correctly
+ git_config['tag'] = 'tag_for_tests'
+ with self.mocked_stdout_stderr():
+ res = ft.get_source_tarball_from_git('test.tar.gz', target_dir, git_config)
+ stderr = self.get_stderr()
+ self.assertIn('Tag tag_for_tests was not downloaded in the first try', stderr)
+ self.assertEqual(res, test_file)
+ self.assertTrue(os.path.isfile(test_file))
+ # Check that we indeed downloaded the tag and not the branch
+ extracted_dir = tempfile.mkdtemp(prefix='extracted_dir')
+ extracted_repo_dir = ft.extract_file(test_file, extracted_dir, change_into_dir=False)
+ self.assertTrue(os.path.isfile(os.path.join(extracted_repo_dir, 'this-is-a-tag.txt')))
+
+ del git_config['tag']
+ git_config['commit'] = '8456f86'
+ res = ft.get_source_tarball_from_git('test2.tar.gz', target_dir, git_config)
+ test_file = os.path.join(target_dir, 'test2.tar.gz')
+ self.assertEqual(res, test_file)
+ self.assertTrue(os.path.isfile(test_file))
+ self.assertEqual(sorted(os.listdir(target_dir)), ['test.tar.gz', 'test2.tar.gz'])
+
+ except EasyBuildError as err:
+ if "Network is down" in str(err):
+ print("Ignoring download error in test_get_source_tarball_from_git, working offline?")
+ else:
+ raise err
+
+ git_config = {
+ 'repo_name': 'testrepository',
+ 'url': 'git@github.com:easybuilders',
+ 'tag': 'tag_for_tests',
+ }
+ args = ['test.tar.gz', self.test_prefix, git_config]
+
+ for key in ['repo_name', 'url', 'tag']:
+ orig_value = git_config.pop(key)
+ if key == 'tag':
+ error_pattern = "Neither tag nor commit found in git_config parameter"
+ else:
+ error_pattern = "%s not specified in git_config parameter" % key
+ self.assertErrorRegex(EasyBuildError, error_pattern, ft.get_source_tarball_from_git, *args)
+ git_config[key] = orig_value
+
+ git_config['commit'] = '8456f86'
+ error_pattern = "Tag and commit are mutually exclusive in git_config parameter"
+ self.assertErrorRegex(EasyBuildError, error_pattern, ft.get_source_tarball_from_git, *args)
+ del git_config['commit']
+
+ git_config['unknown'] = 'foobar'
+ error_pattern = "Found one or more unexpected keys in 'git_config' specification"
+ self.assertErrorRegex(EasyBuildError, error_pattern, ft.get_source_tarball_from_git, *args)
+ del git_config['unknown']
+
+ args[0] = 'test.txt'
+ error_pattern = "git_config currently only supports filename ending in .tar.gz"
+ self.assertErrorRegex(EasyBuildError, error_pattern, ft.get_source_tarball_from_git, *args)
+ args[0] = 'test.tar.gz'
+
def test_is_sha256_checksum(self):
"""Test for is_sha256_checksum function."""
a_sha256_checksum = '44332000aa33b99ad1e00cbd1a7da769220d74647060a10e807b916d73ea27bc'
diff --git a/test/framework/github.py b/test/framework/github.py
index 135ab72ba3..f505a1f80c 100644
--- a/test/framework/github.py
+++ b/test/framework/github.py
@@ -33,6 +33,7 @@
import random
import re
import sys
+import textwrap
import unittest
from test.framework.utilities import EnhancedTestCase, TestLoaderFiltered, init_config
from time import gmtime
@@ -40,6 +41,7 @@
import easybuild.tools.testing
from easybuild.base.rest import RestClient
+from easybuild.framework.easyconfig.easyconfig import EasyConfig
from easybuild.framework.easyconfig.tools import categorize_files_by_type
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.config import build_option, module_classes, update_build_option
@@ -575,6 +577,16 @@ def test_github_find_patches(self):
reg = re.compile(r'[1-9]+ of [1-9]+ easyconfigs checked')
self.assertTrue(re.search(reg, txt))
+ self.assertEqual(gh.find_software_name_for_patch('test.patch', []), None)
+
+ # check behaviour of find_software_name_for_patch when non-UTF8 patch files are present (only with Python 3)
+ if sys.version_info[0] >= 3:
+ non_utf8_patch = os.path.join(self.test_prefix, 'problem.patch')
+ with open(non_utf8_patch, 'wb') as fp:
+ fp.write(bytes("+ ximage->byte_order=T1_byte_order; /* Set t1lib\xb4s byteorder */\n", 'iso_8859_1'))
+
+ self.assertEqual(gh.find_software_name_for_patch('test.patch', [self.test_prefix]), None)
+
def test_github_det_commit_status(self):
"""Test det_commit_status function."""
@@ -772,53 +784,103 @@ def test_github_det_patch_specs(self):
"""Test for det_patch_specs function."""
patch_paths = [os.path.join(self.test_prefix, p) for p in ['1.patch', '2.patch', '3.patch']]
- file_info = {'ecs': [
- {'name': 'A', 'patches': ['1.patch'], 'exts_list': []},
- {'name': 'B', 'patches': [], 'exts_list': []},
- ]
- }
+ file_info = {'ecs': []}
+
+ rawtxt = textwrap.dedent("""
+ easyblock = 'ConfigureMake'
+ name = 'A'
+ version = '42'
+ homepage = 'http://foo.com/'
+ description = ''
+ toolchain = {"name":"GCC", "version": "4.6.3"}
+
+ patches = ['1.patch']
+ """)
+ file_info['ecs'].append(EasyConfig(None, rawtxt=rawtxt))
+ rawtxt = textwrap.dedent("""
+ easyblock = 'ConfigureMake'
+ name = 'B'
+ version = '42'
+ homepage = 'http://foo.com/'
+ description = ''
+ toolchain = {"name":"GCC", "version": "4.6.3"}
+ """)
+ file_info['ecs'].append(EasyConfig(None, rawtxt=rawtxt))
+
error_pattern = "Failed to determine software name to which patch file .*/2.patch relates"
self.mock_stdout(True)
self.assertErrorRegex(EasyBuildError, error_pattern, gh.det_patch_specs, patch_paths, file_info, [])
self.mock_stdout(False)
- file_info['ecs'].append({'name': 'C', 'patches': [('3.patch', 'subdir'), '2.patch'], 'exts_list': []})
+ rawtxt = textwrap.dedent("""
+ easyblock = 'ConfigureMake'
+ name = 'C'
+ version = '42'
+ homepage = 'http://foo.com/'
+ description = ''
+ toolchain = {"name":"GCC", "version": "4.6.3"}
+
+ patches = [('3.patch', 'subdir'), '2.patch']
+ """)
+ file_info['ecs'].append(EasyConfig(None, rawtxt=rawtxt))
self.mock_stdout(True)
res = gh.det_patch_specs(patch_paths, file_info, [])
self.mock_stdout(False)
- self.assertEqual(len(res), 3)
- self.assertEqual(os.path.basename(res[0][0]), '1.patch')
- self.assertEqual(res[0][1], 'A')
- self.assertEqual(os.path.basename(res[1][0]), '2.patch')
- self.assertEqual(res[1][1], 'C')
- self.assertEqual(os.path.basename(res[2][0]), '3.patch')
- self.assertEqual(res[2][1], 'C')
+ self.assertEqual([i[0] for i in res], patch_paths)
+ self.assertEqual([i[1] for i in res], ['A', 'C', 'C'])
# check if patches for extensions are found
- file_info['ecs'][-1] = {
- 'name': 'patched_ext',
- 'patches': [],
- 'exts_list': [
+ rawtxt = textwrap.dedent("""
+ easyblock = 'ConfigureMake'
+ name = 'patched_ext'
+ version = '42'
+ homepage = 'http://foo.com/'
+ description = ''
+ toolchain = {"name":"GCC", "version": "4.6.3"}
+
+ exts_list = [
'foo',
('bar', '1.2.3'),
('patched', '4.5.6', {
- 'patches': [('2.patch', 1), '3.patch'],
+ 'patches': [('%(name)s-2.patch', 1), '%(name)s-3.patch'],
}),
- ],
- }
+ ]
+ """)
+ patch_paths[1:3] = [os.path.join(self.test_prefix, p) for p in ['patched-2.patch', 'patched-3.patch']]
+ file_info['ecs'][-1] = EasyConfig(None, rawtxt=rawtxt)
+
+ self.mock_stdout(True)
+ res = gh.det_patch_specs(patch_paths, file_info, [])
+ self.mock_stdout(False)
+
+ self.assertEqual([i[0] for i in res], patch_paths)
+ self.assertEqual([i[1] for i in res], ['A', 'patched_ext', 'patched_ext'])
+
+ # check if patches for components are found
+ rawtxt = textwrap.dedent("""
+ easyblock = 'PythonBundle'
+ name = 'patched_bundle'
+ version = '42'
+ homepage = 'http://foo.com/'
+ description = ''
+ toolchain = {"name":"GCC", "version": "4.6.3"}
+
+ components = [
+ ('bar', '1.2.3'),
+ ('patched', '4.5.6', {
+ 'patches': [('%(name)s-2.patch', 1), '%(name)s-3.patch'],
+ }),
+ ]
+ """)
+ file_info['ecs'][-1] = EasyConfig(None, rawtxt=rawtxt)
self.mock_stdout(True)
res = gh.det_patch_specs(patch_paths, file_info, [])
self.mock_stdout(False)
- self.assertEqual(len(res), 3)
- self.assertEqual(os.path.basename(res[0][0]), '1.patch')
- self.assertEqual(res[0][1], 'A')
- self.assertEqual(os.path.basename(res[1][0]), '2.patch')
- self.assertEqual(res[1][1], 'patched_ext')
- self.assertEqual(os.path.basename(res[2][0]), '3.patch')
- self.assertEqual(res[2][1], 'patched_ext')
+ self.assertEqual([i[0] for i in res], patch_paths)
+ self.assertEqual([i[1] for i in res], ['A', 'patched_bundle', 'patched_bundle'])
def test_github_restclient(self):
"""Test use of RestClient."""
diff --git a/test/framework/options.py b/test/framework/options.py
index f6d006e2c5..a83da8116d 100644
--- a/test/framework/options.py
+++ b/test/framework/options.py
@@ -34,6 +34,7 @@
import stat
import sys
import tempfile
+import textwrap
from distutils.version import LooseVersion
from unittest import TextTestRunner
@@ -3229,13 +3230,29 @@ def test_xxx_include_easyblocks(self):
sys.modules[pkg].__path__.remove(path)
# include extra test easyblocks
- foo_txt = '\n'.join([
- 'from easybuild.framework.easyblock import EasyBlock',
- 'class EB_foo(EasyBlock):',
- ' pass',
- ''
- ])
+ # Make them inherit from each other to trigger a known issue with changed imports, see #3779
+ # Choose naming so that order of naming is different than inheritance order
+ afoo_txt = textwrap.dedent("""
+ from easybuild.framework.easyblock import EasyBlock
+ class EB_afoo(EasyBlock):
+ def __init__(self, *args, **kwargs):
+ super(EB_afoo, self).__init__(*args, **kwargs)
+ """)
+ write_file(os.path.join(self.test_prefix, 'afoo.py'), afoo_txt)
+ foo_txt = textwrap.dedent("""
+ from easybuild.easyblocks.zfoo import EB_zfoo
+ class EB_foo(EB_zfoo):
+ def __init__(self, *args, **kwargs):
+ super(EB_foo, self).__init__(*args, **kwargs)
+ """)
write_file(os.path.join(self.test_prefix, 'foo.py'), foo_txt)
+ zfoo_txt = textwrap.dedent("""
+ from easybuild.easyblocks.afoo import EB_afoo
+ class EB_zfoo(EB_afoo):
+ def __init__(self, *args, **kwargs):
+ super(EB_zfoo, self).__init__(*args, **kwargs)
+ """)
+ write_file(os.path.join(self.test_prefix, 'zfoo.py'), zfoo_txt)
# clear log
write_file(self.logfile, '')
@@ -3253,12 +3270,27 @@ def test_xxx_include_easyblocks(self):
foo_regex = re.compile(r"^\|-- EB_foo \(easybuild.easyblocks.foo @ %s\)" % path_pattern, re.M)
self.assertTrue(foo_regex.search(logtxt), "Pattern '%s' found in: %s" % (foo_regex.pattern, logtxt))
+ ec_txt = '\n'.join([
+ 'easyblock = "EB_foo"',
+ 'name = "pi"',
+ 'version = "3.14"',
+ 'homepage = "http://example.com"',
+ 'description = "test easyconfig"',
+ 'toolchain = SYSTEM',
+ ])
+ ec = EasyConfig(path=None, rawtxt=ec_txt)
+
# easyblock is found via get_easyblock_class
- klass = get_easyblock_class('EB_foo')
- self.assertTrue(issubclass(klass, EasyBlock), "%s is an EasyBlock derivative class" % klass)
+ for name in ('EB_afoo', 'EB_foo', 'EB_zfoo'):
+ klass = get_easyblock_class(name)
+ self.assertTrue(issubclass(klass, EasyBlock), "%s (%s) is an EasyBlock derivative class" % (klass, name))
- # 'undo' import of foo easyblock
- del sys.modules['easybuild.easyblocks.foo']
+ eb_inst = klass(ec)
+ self.assertTrue(eb_inst is not None, "Instantiating the injected class %s works" % name)
+
+ # 'undo' import of the easyblocks
+ for name in ('afoo', 'foo', 'zfoo'):
+ del sys.modules['easybuild.easyblocks.' + name]
# must be run after test for --list-easyblocks, hence the '_xxx_'
# cleaning up the imported easyblocks is quite difficult...
@@ -3978,7 +4010,7 @@ def test_new_branch_github(self):
regexs = [
r"^== fetching branch 'develop' from https://github.com/easybuilders/easybuild-easyconfigs.git\.\.\.",
r"^== copying files to .*/easybuild-easyconfigs\.\.\.",
- r"^== pushing branch '.*' to remote '.*' \(%s\) \[DRY RUN\]" % remote,
+ r"^== pushing branch '[0-9]{14}_new_pr_toy00' to remote '.*' \(%s\) \[DRY RUN\]" % remote,
]
self._assert_regexs(regexs, txt)
@@ -3999,7 +4031,7 @@ def test_new_branch_github(self):
regexs = [
r"^== fetching branch 'develop' from https://github.com/easybuilders/easybuild-easyblocks.git\.\.\.",
r"^== copying files to .*/easybuild-easyblocks\.\.\.",
- r"^== pushing branch '.*' to remote '.*' \(%s\) \[DRY RUN\]" % remote,
+ r"^== pushing branch '[0-9]{14}_new_pr_toy' to remote '.*' \(%s\) \[DRY RUN\]" % remote,
]
self._assert_regexs(regexs, txt)
@@ -4026,7 +4058,7 @@ def test_new_branch_github(self):
regexs = [
r"^== fetching branch 'develop' from https://github.com/easybuilders/easybuild-framework.git\.\.\.",
r"^== copying files to .*/easybuild-framework\.\.\.",
- r"^== pushing branch '.*' to remote '.*' \(%s\) \[DRY RUN\]" % remote,
+ r"^== pushing branch '[0-9]{14}_new_pr_[A-Za-z]{10}' to remote '.*' \(%s\) \[DRY RUN\]" % remote,
]
self._assert_regexs(regexs, txt)
@@ -5793,6 +5825,33 @@ def test_show_system_info(self):
regex = re.compile(pattern, re.M)
self.assertTrue(regex.search(txt), "Pattern '%s' found in: %s" % (regex.pattern, txt))
+ def test_check_eb_deps(self):
+ """Test for --check-eb-deps."""
+ txt, _ = self._run_mock_eb(['--check-eb-deps'], raise_error=True)
+
+ # keep in mind that these patterns should match with both normal output and Rich output!
+ opt_dep_info_pattern = r'([0-9.]+|\(NOT FOUND\)|not found|\(unknown version\))'
+ tool_info_pattern = r'([0-9.]+|\(NOT FOUND\)|not found|\(found, UNKNOWN version\)|version\?\!)'
+ patterns = [
+ r"Required dependencies",
+ r"Python.* [23][0-9.]+",
+ r"modules tool.* [A-Za-z0-9.\s-]+",
+ r"Optional dependencies",
+ r"archspec.* %s.*determining name" % opt_dep_info_pattern,
+ r"GitPython.* %s.*GitHub integration" % opt_dep_info_pattern,
+ r"Rich.* %s.*eb command rich terminal output" % opt_dep_info_pattern,
+ r"setuptools.* %s.*information on Python packages" % opt_dep_info_pattern,
+ r"System tools",
+ r"make.* %s" % tool_info_pattern,
+ r"patch.* %s" % tool_info_pattern,
+ r"sed.* %s" % tool_info_pattern,
+ r"Slurm.* %s" % tool_info_pattern,
+ ]
+
+ for pattern in patterns:
+ regex = re.compile(pattern, re.M)
+ self.assertTrue(regex.search(txt), "Pattern '%s' found in: %s" % (regex.pattern, txt))
+
def test_tmp_logdir(self):
"""Test use of --tmp-logdir."""
@@ -6181,6 +6240,68 @@ def test_accept_eula_for(self):
self.eb_main(args, do_build=True, raise_error=True)
self.assertTrue(os.path.exists(toy_modfile))
+ def test_config_abs_path(self):
+ """Test ensuring of absolute path values for path configuration options."""
+
+ test_topdir = os.path.join(self.test_prefix, 'test_topdir')
+ test_subdir = os.path.join(test_topdir, 'test_middle_dir', 'test_subdir')
+ mkdir(test_subdir, parents=True)
+ change_dir(test_subdir)
+
+ # a relative path specified in a configuration file is positively weird, but fine :)
+ cfgfile = os.path.join(self.test_prefix, 'test.cfg')
+ cfgtxt = '\n'.join([
+ "[config]",
+ "containerpath = ..",
+ "repositorypath = /apps/easyconfigs_archive, somesubdir",
+ ])
+ write_file(cfgfile, cfgtxt)
+
+ # relative paths in environment variables is also weird,
+ # but OK for the sake of testing...
+ os.environ['EASYBUILD_INSTALLPATH'] = '../..'
+ os.environ['EASYBUILD_ROBOT_PATHS'] = '../..'
+
+ args = [
+ '--configfiles=%s' % cfgfile,
+ '--prefix=..',
+ '--sourcepath=.',
+ '--show-config',
+ ]
+
+ txt, _ = self._run_mock_eb(args, do_build=True, raise_error=True, testing=False, strip=True)
+
+ patterns = [
+ r"^containerpath\s+\(F\) = /.*/test_topdir/test_middle_dir$",
+ r"^installpath\s+\(E\) = /.*/test_topdir$",
+ r"^prefix\s+\(C\) = /.*/test_topdir/test_middle_dir$",
+ r"^repositorypath\s+\(F\) = \('/apps/easyconfigs_archive', ' somesubdir'\)$",
+ r"^sourcepath\s+\(C\) = /.*/test_topdir/test_middle_dir/test_subdir$",
+ r"^robot-paths\s+\(E\) = /.*/test_topdir$",
+ ]
+ for pattern in patterns:
+ regex = re.compile(pattern, re.M)
+ self.assertTrue(regex.search(txt), "Pattern '%s' should be found in: %s" % (pattern, txt))
+
+ # paths specified via --robot have precedence over those specified via $EASYBUILD_ROBOT_PATHS
+ change_dir(test_subdir)
+ args.append('--robot=..:.')
+ txt, _ = self._run_mock_eb(args, do_build=True, raise_error=True, testing=False, strip=True)
+
+ patterns.pop(-1)
+ robot_value_pattern = ', '.join([
+ r'/.*/test_topdir/test_middle_dir', # via --robot (first path)
+ r'/.*/test_topdir/test_middle_dir/test_subdir', # via --robot (second path)
+ r'/.*/test_topdir', # via $EASYBUILD_ROBOT_PATHS
+ ])
+ patterns.extend([
+ r"^robot-paths\s+\(C\) = %s$" % robot_value_pattern,
+ r"^robot\s+\(C\) = %s$" % robot_value_pattern,
+ ])
+ for pattern in patterns:
+ regex = re.compile(pattern, re.M)
+ self.assertTrue(regex.search(txt), "Pattern '%s' should be found in: %s" % (pattern, txt))
+
# end-to-end testing of unknown filename
def test_easystack_wrong_read(self):
"""Test for --easystack when wrong name is provided"""
diff --git a/test/framework/output.py b/test/framework/output.py
new file mode 100644
index 0000000000..174f8164d2
--- /dev/null
+++ b/test/framework/output.py
@@ -0,0 +1,117 @@
+# #
+# Copyright 2021-2021 Ghent University
+#
+# This file is part of EasyBuild,
+# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
+# with support of Ghent University (http://ugent.be/hpc),
+# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
+# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
+# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
+#
+# https://github.com/easybuilders/easybuild
+#
+# EasyBuild is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation v2.
+#
+# EasyBuild is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
+# #
+"""
+Unit tests for functionality in easybuild.tools.output
+
+@author: Kenneth Hoste (Ghent University)
+"""
+import sys
+from unittest import TextTestRunner
+from test.framework.utilities import EnhancedTestCase, TestLoaderFiltered
+
+from easybuild.tools.build_log import EasyBuildError
+from easybuild.tools.config import build_option, get_output_style, update_build_option
+from easybuild.tools.output import DummyProgress, create_progress_bar, use_rich
+
+try:
+ import rich.progress
+ HAVE_RICH = True
+except ImportError:
+ HAVE_RICH = False
+
+
+class OutputTest(EnhancedTestCase):
+ """Tests for functions controlling terminal output."""
+
+ def test_create_progress_bar(self):
+ """Test create_progress_bar function."""
+
+ if HAVE_RICH:
+ expected_progress_bar_class = rich.progress.Progress
+ else:
+ expected_progress_bar_class = DummyProgress
+
+ progress_bar = create_progress_bar()
+ error_msg = "%s should be instance of class %s" % (progress_bar, expected_progress_bar_class)
+ self.assertTrue(isinstance(progress_bar, expected_progress_bar_class), error_msg)
+
+ update_build_option('output_style', 'basic')
+ progress_bar = create_progress_bar()
+ self.assertTrue(isinstance(progress_bar, DummyProgress))
+
+ if HAVE_RICH:
+ update_build_option('output_style', 'rich')
+ progress_bar = create_progress_bar()
+ error_msg = "%s should be instance of class %s" % (progress_bar, expected_progress_bar_class)
+ self.assertTrue(isinstance(progress_bar, expected_progress_bar_class), error_msg)
+
+ update_build_option('show_progress_bar', False)
+ progress_bar = create_progress_bar()
+ self.assertTrue(isinstance(progress_bar, DummyProgress))
+
+ def test_get_output_style(self):
+ """Test get_output_style function."""
+
+ self.assertEqual(build_option('output_style'), 'auto')
+
+ for style in (None, 'auto'):
+ if style:
+ update_build_option('output_style', style)
+
+ if HAVE_RICH:
+ self.assertEqual(get_output_style(), 'rich')
+ else:
+ self.assertEqual(get_output_style(), 'basic')
+
+ test_styles = ['basic', 'no_color']
+ if HAVE_RICH:
+ test_styles.append('rich')
+
+ for style in test_styles:
+ update_build_option('output_style', style)
+ self.assertEqual(get_output_style(), style)
+
+ if not HAVE_RICH:
+ update_build_option('output_style', 'rich')
+ error_pattern = "Can't use 'rich' output style, Rich Python package is not available!"
+ self.assertErrorRegex(EasyBuildError, error_pattern, get_output_style)
+
+ def test_use_rich(self):
+ """Test use_rich function."""
+ try:
+ import rich # noqa
+ self.assertTrue(use_rich())
+ except ImportError:
+ self.assertFalse(use_rich())
+
+
+def suite():
+ """ returns all the testcases in this module """
+ return TestLoaderFiltered().loadTestsFromTestCase(OutputTest, sys.argv[1:])
+
+
+if __name__ == '__main__':
+ res = TextTestRunner(verbosity=1).run(suite())
+ sys.exit(len(res.failures))
diff --git a/test/framework/robot.py b/test/framework/robot.py
index 4213f7e2f8..be3ae74082 100644
--- a/test/framework/robot.py
+++ b/test/framework/robot.py
@@ -670,6 +670,23 @@ def test_det_easyconfig_paths(self):
regex = re.compile(r"^ \* \[.\] .*/__archive__/.*/intel-2012a.eb \(module: intel/2012a\)", re.M)
self.assertTrue(regex.search(outtxt), "Found pattern %s in %s" % (regex.pattern, outtxt))
+ args = [
+ os.path.join(test_ecs_path, 't', 'toy', 'toy-0.0.eb'),
+ os.path.join(test_ecs_path, 't', 'toy', 'toy-0.0-gompi-2018a-test.eb'),
+ os.path.join(test_ecs_path, 't', 'toy', 'toy-0.0-gompi-2018a.eb'),
+ '--dry-run',
+ '--robot',
+ '--tmpdir=%s' % self.test_prefix,
+ '--filter-ecs=*oy-0.0.eb,*-test.eb',
+ ]
+ outtxt = self.eb_main(args, raise_error=True)
+
+ regex = re.compile(r"^ \* \[.\] .*/toy-0.0-gompi-2018a.eb \(module: toy/0.0-gompi-2018a\)", re.M)
+ self.assertTrue(regex.search(outtxt), "Found pattern %s in %s" % (regex.pattern, outtxt))
+ for ec in ('toy-0.0.eb', 'toy-0.0-gompi-2018a-test.eb'):
+ regex = re.compile(r"^ \* \[.\] .*/%s \(module:" % ec, re.M)
+ self.assertFalse(regex.search(outtxt), "%s should be filtered in %s" % (ec, outtxt))
+
def test_search_paths(self):
"""Test search_paths command line argument."""
fd, dummylogfn = tempfile.mkstemp(prefix='easybuild-dummy', suffix='.log')
diff --git a/test/framework/sandbox/easybuild/easyblocks/generic/pythonbundle.py b/test/framework/sandbox/easybuild/easyblocks/generic/pythonbundle.py
index 7146b8ecd3..0321602f3f 100644
--- a/test/framework/sandbox/easybuild/easyblocks/generic/pythonbundle.py
+++ b/test/framework/sandbox/easybuild/easyblocks/generic/pythonbundle.py
@@ -28,6 +28,7 @@
@author: Miguel Dias Costa (National University of Singapore)
"""
from easybuild.framework.easyblock import EasyBlock
+from easybuild.framework.easyconfig import CUSTOM
class PythonBundle(EasyBlock):
@@ -37,4 +38,7 @@ class PythonBundle(EasyBlock):
def extra_options(extra_vars=None):
if extra_vars is None:
extra_vars = {}
+ extra_vars.update({
+ 'components': [(), "List of components to install: tuples w/ name, version and easyblock to use", CUSTOM],
+ })
return EasyBlock.extra_options(extra_vars)
diff --git a/test/framework/suite.py b/test/framework/suite.py
index 9e9f46bad9..ca618289b4 100755
--- a/test/framework/suite.py
+++ b/test/framework/suite.py
@@ -66,6 +66,7 @@
import test.framework.modules as m
import test.framework.modulestool as mt
import test.framework.options as o
+import test.framework.output as ou
import test.framework.parallelbuild as p
import test.framework.package as pkg
import test.framework.repository as r
@@ -113,7 +114,7 @@
# call suite() for each module and then run them all
# note: make sure the options unit tests run first, to avoid running some of them with a readily initialized config
tests = [gen, bl, o, r, ef, ev, ebco, ep, e, mg, m, mt, f, run, a, robot, b, v, g, tcv, tc, t, c, s, lic, f_c,
- tw, p, i, pkg, d, env, et, y, st, h, ct, lib, u, es]
+ tw, p, i, pkg, d, env, et, y, st, h, ct, lib, u, es, ou]
class EasyBuildFrameworkTestSuite(unittest.TestSuite):
diff --git a/test/framework/toolchain.py b/test/framework/toolchain.py
index ad102094c3..aa95ebf46e 100644
--- a/test/framework/toolchain.py
+++ b/test/framework/toolchain.py
@@ -51,6 +51,7 @@
from easybuild.tools.filetools import read_file, symlink, write_file, which
from easybuild.tools.py2vs3 import string_type
from easybuild.tools.run import run_cmd
+from easybuild.tools.systemtools import get_shared_lib_ext
from easybuild.tools.toolchain.mpi import get_mpi_cmd_template
from easybuild.tools.toolchain.toolchain import env_vars_external_module
from easybuild.tools.toolchain.utilities import get_toolchain, search_toolchain
@@ -751,17 +752,28 @@ def test_compiler_dependent_optarch(self):
intel_options = [('intelflag', 'intelflag'), ('GENERIC', 'xSSE2'), ('', '')]
gcc_options = [('gccflag', 'gccflag'), ('march=nocona', 'march=nocona'), ('', '')]
gcccore_options = [('gcccoreflag', 'gcccoreflag'), ('GENERIC', 'march=x86-64 -mtune=generic'), ('', '')]
- toolchains = [
- ('iccifort', '2018.1.163'),
- ('GCC', '6.4.0-2.28'),
- ('GCCcore', '6.2.0'),
- ('PGI', '16.7-GCC-5.4.0-2.26'),
- ]
+
+ tc_intel = ('iccifort', '2018.1.163')
+ tc_gcc = ('GCC', '6.4.0-2.28')
+ tc_gcccore = ('GCCcore', '6.2.0')
+ tc_pgi = ('PGI', '16.7-GCC-5.4.0-2.26')
enabled = [True, False]
- test_cases = product(intel_options, gcc_options, gcccore_options, toolchains, enabled)
+ test_cases = []
+ for i, (tc, options) in enumerate(zip((tc_intel, tc_gcc, tc_gcccore),
+ (intel_options, gcc_options, gcccore_options))):
+ # Vary only the compiler specific option
+ for opt in options:
+ new_value = [intel_options[0], gcc_options[0], gcccore_options[0], tc]
+ new_value[i] = opt
+ test_cases.append(new_value)
+ # Add one case for PGI
+ test_cases.append((intel_options[0], gcc_options[0], gcccore_options[0], tc_pgi))
+
+ # Run each for enabled and disabled
+ test_cases = list(product(test_cases, enabled))
- for intel_flags, gcc_flags, gcccore_flags, (toolchain_name, toolchain_ver), enable in test_cases:
+ for (intel_flags, gcc_flags, gcccore_flags, (toolchain_name, toolchain_ver)), enable in test_cases:
intel_flags, intel_flags_exp = intel_flags
gcc_flags, gcc_flags_exp = gcc_flags
@@ -1448,11 +1460,15 @@ def test_old_new_iccifort(self):
self.setup_sandbox_for_intel_fftw(self.test_prefix, imklver='10.2.6.038')
self.modtool.prepend_module_path(self.test_prefix)
+ shlib_ext = get_shared_lib_ext()
+
# incl. -lguide
libblas_mt_intel3 = "-Wl,-Bstatic -Wl,--start-group -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core"
libblas_mt_intel3 += " -Wl,--end-group -Wl,-Bdynamic -liomp5 -lguide -lpthread"
# no -lguide
+ blas_static_libs_intel4 = 'libmkl_intel_lp64.a,libmkl_sequential.a,libmkl_core.a'
+ blas_shared_libs_intel4 = blas_static_libs_intel4.replace('.a', '.' + shlib_ext)
libblas_intel4 = "-Wl,-Bstatic -Wl,--start-group -lmkl_intel_lp64 -lmkl_sequential -lmkl_core"
libblas_intel4 += " -Wl,--end-group -Wl,-Bdynamic"
libblas_mt_intel4 = "-Wl,-Bstatic -Wl,--start-group -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core"
@@ -1469,18 +1485,83 @@ def test_old_new_iccifort(self):
libscalack_intel4 = "-lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -lmkl_intel_lp64 -lmkl_sequential "
libscalack_intel4 += "-lmkl_core"
- libblas_mt_fosscuda = "-lopenblas -lgfortran -lpthread"
+ blas_static_libs_fosscuda = "libopenblas.a,libgfortran.a"
+ blas_shared_libs_fosscuda = blas_static_libs_fosscuda.replace('.a', '.' + shlib_ext)
+ blas_mt_static_libs_fosscuda = blas_static_libs_fosscuda + ",libpthread.a"
+ blas_mt_shared_libs_fosscuda = blas_mt_static_libs_fosscuda.replace('.a', '.' + shlib_ext)
+ libblas_fosscuda = "-lopenblas -lgfortran"
+ libblas_mt_fosscuda = libblas_fosscuda + " -lpthread"
+
+ fft_static_libs_fosscuda = "libfftw3.a"
+ fft_shared_libs_fosscuda = fft_static_libs_fosscuda.replace('.a', '.' + shlib_ext)
+ fft_mt_static_libs_fosscuda = "libfftw3.a,libpthread.a"
+ fft_mt_shared_libs_fosscuda = fft_mt_static_libs_fosscuda.replace('.a', '.' + shlib_ext)
+ fft_mt_static_libs_fosscuda_omp = "libfftw3_omp.a,libfftw3.a,libpthread.a"
+ fft_mt_shared_libs_fosscuda_omp = fft_mt_static_libs_fosscuda_omp.replace('.a', '.' + shlib_ext)
+ libfft_fosscuda = "-lfftw3"
+ libfft_mt_fosscuda = libfft_fosscuda + " -lpthread"
+ libfft_mt_fosscuda_omp = "-lfftw3_omp " + libfft_fosscuda + " -lpthread"
+
+ lapack_static_libs_fosscuda = "libopenblas.a,libgfortran.a"
+ lapack_shared_libs_fosscuda = lapack_static_libs_fosscuda.replace('.a', '.' + shlib_ext)
+ lapack_mt_static_libs_fosscuda = lapack_static_libs_fosscuda + ",libpthread.a"
+ lapack_mt_shared_libs_fosscuda = lapack_mt_static_libs_fosscuda.replace('.a', '.' + shlib_ext)
+ liblapack_fosscuda = "-lopenblas -lgfortran"
+ liblapack_mt_fosscuda = liblapack_fosscuda + " -lpthread"
+
libscalack_fosscuda = "-lscalapack -lopenblas -lgfortran"
- libfft_mt_fosscuda = "-lfftw3_omp -lfftw3 -lpthread"
+ libscalack_mt_fosscuda = libscalack_fosscuda + " -lpthread"
+ scalapack_static_libs_fosscuda = "libscalapack.a,libopenblas.a,libgfortran.a"
+ scalapack_shared_libs_fosscuda = scalapack_static_libs_fosscuda.replace('.a', '.' + shlib_ext)
+ scalapack_mt_static_libs_fosscuda = "libscalapack.a,libopenblas.a,libgfortran.a,libpthread.a"
+ scalapack_mt_shared_libs_fosscuda = scalapack_mt_static_libs_fosscuda.replace('.a', '.' + shlib_ext)
tc = self.get_toolchain('fosscuda', version='2018a')
tc.prepare()
+ self.assertEqual(os.environ['BLAS_SHARED_LIBS'], blas_shared_libs_fosscuda)
+ self.assertEqual(os.environ['BLAS_STATIC_LIBS'], blas_static_libs_fosscuda)
+ self.assertEqual(os.environ['BLAS_MT_SHARED_LIBS'], blas_mt_shared_libs_fosscuda)
+ self.assertEqual(os.environ['BLAS_MT_STATIC_LIBS'], blas_mt_static_libs_fosscuda)
+ self.assertEqual(os.environ['LIBBLAS'], libblas_fosscuda)
self.assertEqual(os.environ['LIBBLAS_MT'], libblas_mt_fosscuda)
+
+ self.assertEqual(os.environ['LAPACK_SHARED_LIBS'], lapack_shared_libs_fosscuda)
+ self.assertEqual(os.environ['LAPACK_STATIC_LIBS'], lapack_static_libs_fosscuda)
+ self.assertEqual(os.environ['LAPACK_MT_SHARED_LIBS'], lapack_mt_shared_libs_fosscuda)
+ self.assertEqual(os.environ['LAPACK_MT_STATIC_LIBS'], lapack_mt_static_libs_fosscuda)
+ self.assertEqual(os.environ['LIBLAPACK'], liblapack_fosscuda)
+ self.assertEqual(os.environ['LIBLAPACK_MT'], liblapack_mt_fosscuda)
+
+ self.assertEqual(os.environ['BLAS_LAPACK_SHARED_LIBS'], blas_shared_libs_fosscuda)
+ self.assertEqual(os.environ['BLAS_LAPACK_STATIC_LIBS'], blas_static_libs_fosscuda)
+ self.assertEqual(os.environ['BLAS_LAPACK_MT_SHARED_LIBS'], blas_mt_shared_libs_fosscuda)
+ self.assertEqual(os.environ['BLAS_LAPACK_MT_STATIC_LIBS'], blas_mt_static_libs_fosscuda)
+
+ self.assertEqual(os.environ['FFT_SHARED_LIBS'], fft_shared_libs_fosscuda)
+ self.assertEqual(os.environ['FFT_STATIC_LIBS'], fft_static_libs_fosscuda)
+ self.assertEqual(os.environ['FFT_SHARED_LIBS_MT'], fft_mt_shared_libs_fosscuda)
+ self.assertEqual(os.environ['FFT_STATIC_LIBS_MT'], fft_mt_static_libs_fosscuda)
+ self.assertEqual(os.environ['FFTW_SHARED_LIBS'], fft_shared_libs_fosscuda)
+ self.assertEqual(os.environ['FFTW_STATIC_LIBS'], fft_static_libs_fosscuda)
+ self.assertEqual(os.environ['FFTW_SHARED_LIBS_MT'], fft_mt_shared_libs_fosscuda)
+ self.assertEqual(os.environ['FFTW_STATIC_LIBS_MT'], fft_mt_static_libs_fosscuda)
+ self.assertEqual(os.environ['LIBFFT'], libfft_fosscuda)
+ self.assertEqual(os.environ['LIBFFT_MT'], libfft_mt_fosscuda)
+
self.assertEqual(os.environ['LIBSCALAPACK'], libscalack_fosscuda)
+ self.assertEqual(os.environ['LIBSCALAPACK_MT'], libscalack_mt_fosscuda)
+ self.assertEqual(os.environ['SCALAPACK_SHARED_LIBS'], scalapack_shared_libs_fosscuda)
+ self.assertEqual(os.environ['SCALAPACK_STATIC_LIBS'], scalapack_static_libs_fosscuda)
+ self.assertEqual(os.environ['SCALAPACK_MT_SHARED_LIBS'], scalapack_mt_shared_libs_fosscuda)
+ self.assertEqual(os.environ['SCALAPACK_MT_STATIC_LIBS'], scalapack_mt_static_libs_fosscuda)
self.modtool.purge()
tc = self.get_toolchain('intel', version='2018a')
tc.prepare()
+ self.assertEqual(os.environ.get('BLAS_SHARED_LIBS', "(not set)"), blas_shared_libs_intel4)
+ self.assertEqual(os.environ.get('BLAS_STATIC_LIBS', "(not set)"), blas_static_libs_intel4)
+ self.assertEqual(os.environ.get('LAPACK_SHARED_LIBS', "(not set)"), blas_shared_libs_intel4)
+ self.assertEqual(os.environ.get('LAPACK_STATIC_LIBS', "(not set)"), blas_static_libs_intel4)
self.assertEqual(os.environ.get('LIBBLAS', "(not set)"), libblas_intel4)
self.assertEqual(os.environ.get('LIBBLAS_MT', "(not set)"), libblas_mt_intel4)
self.assertEqual(os.environ.get('LIBFFT', "(not set)"), libfft_intel4)
@@ -1517,9 +1598,42 @@ def test_old_new_iccifort(self):
tc = self.get_toolchain('fosscuda', version='2018a')
tc.set_options({'openmp': True})
tc.prepare()
+ self.assertEqual(os.environ['BLAS_SHARED_LIBS'], blas_shared_libs_fosscuda)
+ self.assertEqual(os.environ['BLAS_STATIC_LIBS'], blas_static_libs_fosscuda)
+ self.assertEqual(os.environ['BLAS_MT_SHARED_LIBS'], blas_mt_shared_libs_fosscuda)
+ self.assertEqual(os.environ['BLAS_MT_STATIC_LIBS'], blas_mt_static_libs_fosscuda)
+ self.assertEqual(os.environ['LIBBLAS'], libblas_fosscuda)
self.assertEqual(os.environ['LIBBLAS_MT'], libblas_mt_fosscuda)
- self.assertEqual(os.environ['LIBFFT_MT'], libfft_mt_fosscuda)
+
+ self.assertEqual(os.environ['LAPACK_SHARED_LIBS'], lapack_shared_libs_fosscuda)
+ self.assertEqual(os.environ['LAPACK_STATIC_LIBS'], lapack_static_libs_fosscuda)
+ self.assertEqual(os.environ['LAPACK_MT_SHARED_LIBS'], lapack_mt_shared_libs_fosscuda)
+ self.assertEqual(os.environ['LAPACK_MT_STATIC_LIBS'], lapack_mt_static_libs_fosscuda)
+ self.assertEqual(os.environ['LIBLAPACK'], liblapack_fosscuda)
+ self.assertEqual(os.environ['LIBLAPACK_MT'], liblapack_mt_fosscuda)
+
+ self.assertEqual(os.environ['BLAS_LAPACK_SHARED_LIBS'], blas_shared_libs_fosscuda)
+ self.assertEqual(os.environ['BLAS_LAPACK_STATIC_LIBS'], blas_static_libs_fosscuda)
+ self.assertEqual(os.environ['BLAS_LAPACK_MT_SHARED_LIBS'], blas_mt_shared_libs_fosscuda)
+ self.assertEqual(os.environ['BLAS_LAPACK_MT_STATIC_LIBS'], blas_mt_static_libs_fosscuda)
+
+ self.assertEqual(os.environ['FFT_SHARED_LIBS'], fft_shared_libs_fosscuda)
+ self.assertEqual(os.environ['FFT_STATIC_LIBS'], fft_static_libs_fosscuda)
+ self.assertEqual(os.environ['FFT_SHARED_LIBS_MT'], fft_mt_shared_libs_fosscuda_omp)
+ self.assertEqual(os.environ['FFT_STATIC_LIBS_MT'], fft_mt_static_libs_fosscuda_omp)
+ self.assertEqual(os.environ['FFTW_SHARED_LIBS'], fft_shared_libs_fosscuda)
+ self.assertEqual(os.environ['FFTW_STATIC_LIBS'], fft_static_libs_fosscuda)
+ self.assertEqual(os.environ['FFTW_SHARED_LIBS_MT'], fft_mt_shared_libs_fosscuda_omp)
+ self.assertEqual(os.environ['FFTW_STATIC_LIBS_MT'], fft_mt_static_libs_fosscuda_omp)
+ self.assertEqual(os.environ['LIBFFT'], libfft_fosscuda)
+ self.assertEqual(os.environ['LIBFFT_MT'], libfft_mt_fosscuda_omp)
+
self.assertEqual(os.environ['LIBSCALAPACK'], libscalack_fosscuda)
+ self.assertEqual(os.environ['LIBSCALAPACK_MT'], libscalack_mt_fosscuda)
+ self.assertEqual(os.environ['SCALAPACK_SHARED_LIBS'], scalapack_shared_libs_fosscuda)
+ self.assertEqual(os.environ['SCALAPACK_STATIC_LIBS'], scalapack_static_libs_fosscuda)
+ self.assertEqual(os.environ['SCALAPACK_MT_SHARED_LIBS'], scalapack_mt_shared_libs_fosscuda)
+ self.assertEqual(os.environ['SCALAPACK_MT_STATIC_LIBS'], scalapack_mt_static_libs_fosscuda)
def test_standalone_iccifort(self):
"""Test whether standalone installation of iccifort matches the iccifort toolchain definition."""
diff --git a/test/framework/toy_build.py b/test/framework/toy_build.py
index 5d51fd29fd..bedfbae3ea 100644
--- a/test/framework/toy_build.py
+++ b/test/framework/toy_build.py
@@ -106,21 +106,26 @@ def tearDown(self):
if os.path.exists(self.dummylogfn):
os.remove(self.dummylogfn)
- def check_toy(self, installpath, outtxt, version='0.0', versionprefix='', versionsuffix=''):
+ def check_toy(self, installpath, outtxt, version='0.0', versionprefix='', versionsuffix='', error=None):
"""Check whether toy build succeeded."""
full_version = ''.join([versionprefix, version, versionsuffix])
+ if error is not None:
+ error_msg = '\nNote: Caught error: %s' % error
+ else:
+ error_msg = ''
+
# check for success
- success = re.compile(r"COMPLETED: Installation ended successfully \(took .* secs?\)")
- self.assertTrue(success.search(outtxt), "COMPLETED message found in '%s" % outtxt)
+ success = re.compile(r"COMPLETED: Installation (ended|STOPPED) successfully \(took .* secs?\)")
+ self.assertTrue(success.search(outtxt), "COMPLETED message found in '%s'%s" % (outtxt, error_msg))
# if the module exists, it should be fine
toy_module = os.path.join(installpath, 'modules', 'all', 'toy', full_version)
msg = "module for toy build toy/%s found (path %s)" % (full_version, toy_module)
if get_module_syntax() == 'Lua':
toy_module += '.lua'
- self.assertTrue(os.path.exists(toy_module), msg)
+ self.assertTrue(os.path.exists(toy_module), msg + error_msg)
# module file is symlinked according to moduleclass
toy_module_symlink = os.path.join(installpath, 'modules', 'tools', 'toy', full_version)
@@ -183,7 +188,7 @@ def test_toy_build(self, extra_args=None, ec_file=None, tmpdir=None, verify=True
raise myerr
if verify:
- self.check_toy(self.test_installpath, outtxt, versionsuffix=versionsuffix)
+ self.check_toy(self.test_installpath, outtxt, versionsuffix=versionsuffix, error=myerr)
if test_readme:
# make sure postinstallcmds were used
@@ -615,7 +620,16 @@ def test_toy_permissions_installdir(self):
# 2. Existing build with --rebuild -> Reinstall and set read-only
# 3. Existing build with --force -> Reinstall and set read-only
# 4-5: Same as 2-3 but with --skip
- for extra_args in ([], ['--rebuild'], ['--force'], ['--skip', '--rebuild'], ['--skip', '--force']):
+ # 6. Existing build with --fetch -> Test that logs can be written
+ test_cases = (
+ [],
+ ['--rebuild'],
+ ['--force'],
+ ['--skip', '--rebuild'],
+ ['--skip', '--force'],
+ ['--rebuild', '--fetch'],
+ )
+ for extra_args in test_cases:
self.mock_stdout(True)
self.test_toy_build(ec_file=test_ec, extra_args=['--read-only-installdir'] + extra_args, force=False)
self.mock_stdout(False)
@@ -2937,6 +2951,7 @@ def test_fix_shebang(self):
# copy of bin/toy to use in fix_python_shebang_for and fix_perl_shebang_for
" 'cp -a %(installdir)s/bin/toy %(installdir)s/bin/toy.python',",
" 'cp -a %(installdir)s/bin/toy %(installdir)s/bin/toy.perl',",
+ " 'cp -a %(installdir)s/bin/toy %(installdir)s/bin/toy.sh',",
# hardcoded path to bin/python
" 'echo \"#!/usr/bin/python\\n# test\" > %(installdir)s/bin/t1.py',",
@@ -2973,9 +2988,26 @@ def test_fix_shebang(self):
# shebang bash
" 'echo \"#!/usr/bin/env bash\\n# test\" > %(installdir)s/bin/b2.sh',",
+ # tests for bash shebang
+ # hardcoded path to bin/bash
+ " 'echo \"#!/bin/bash\\n# test\" > %(installdir)s/bin/t1.sh',",
+ # hardcoded path to usr/bin/bash
+ " 'echo \"#!/usr/bin/bash\\n# test\" > %(installdir)s/bin/t2.sh',",
+ # already OK, should remain the same
+ " 'echo \"#!/usr/bin/env bash\\n# test\" > %(installdir)s/bin/t3.sh',",
+ # shebang with space, should strip the space
+ " 'echo \"#! /usr/bin/env bash\\n# test\" > %(installdir)s/bin/t4.sh',",
+ # no shebang sh
+ " 'echo \"# test\" > %(installdir)s/bin/t5.sh',",
+ # shebang python
+ " 'echo \"#!/usr/bin/env python\\n# test\" > %(installdir)s/bin/b1.py',",
+ # shebang perl
+ " 'echo \"#!/usr/bin/env perl\\n# test\" > %(installdir)s/bin/b1.pl',",
+
"]",
- "fix_python_shebang_for = ['bin/t1.py', 'bin/*.py', 'nosuchdir/*.py', 'bin/toy.python', 'bin/b1.sh']",
- "fix_perl_shebang_for = ['bin/*.pl', 'bin/b2.sh', 'bin/toy.perl']",
+ "fix_python_shebang_for = ['bin/t1.py', 'bin/t*.py', 'nosuchdir/*.py', 'bin/toy.python', 'bin/b1.sh']",
+ "fix_perl_shebang_for = ['bin/t*.pl', 'bin/b2.sh', 'bin/toy.perl']",
+ "fix_bash_shebang_for = ['bin/t*.sh', 'bin/b1.py', 'bin/b1.pl', 'bin/toy.sh']",
])
write_file(test_ec, test_ec_txt)
self.test_toy_build(ec_file=test_ec, raise_error=True)
@@ -2984,36 +3016,31 @@ def test_fix_shebang(self):
# bin/toy and bin/toy2 should *not* be patched, since they're binary files
toy_txt = read_file(os.path.join(toy_bindir, 'toy'), mode='rb')
- for fn in ['toy.perl', 'toy.python']:
+ for fn in ['toy.sh', 'toy.perl', 'toy.python']:
fn_txt = read_file(os.path.join(toy_bindir, fn), mode='rb')
# no shebang added
self.assertFalse(fn_txt.startswith(b"#!/"))
# exact same file as original binary (untouched)
self.assertEqual(toy_txt, fn_txt)
+ regexes = {}
# no re.M, this should match at start of file!
- py_shebang_regex = re.compile(r'^#!/usr/bin/env python\n# test$')
- for pybin in ['t1.py', 't2.py', 't3.py', 't4.py', 't5.py', 't6.py', 't7.py']:
- pybin_path = os.path.join(toy_bindir, pybin)
- pybin_txt = read_file(pybin_path)
- self.assertTrue(py_shebang_regex.match(pybin_txt),
- "Pattern '%s' found in %s: %s" % (py_shebang_regex.pattern, pybin_path, pybin_txt))
-
- # no re.M, this should match at start of file!
- perl_shebang_regex = re.compile(r'^#!/usr/bin/env perl\n# test$')
- for perlbin in ['t1.pl', 't2.pl', 't3.pl', 't4.pl', 't5.pl', 't6.pl', 't7.pl']:
- perlbin_path = os.path.join(toy_bindir, perlbin)
- perlbin_txt = read_file(perlbin_path)
- self.assertTrue(perl_shebang_regex.match(perlbin_txt),
- "Pattern '%s' found in %s: %s" % (perl_shebang_regex.pattern, perlbin_path, perlbin_txt))
-
- # There are 2 bash files which shouldn't be influenced by fix_shebang
- bash_shebang_regex = re.compile(r'^#!/usr/bin/env bash\n# test$')
- for bashbin in ['b1.sh', 'b2.sh']:
- bashbin_path = os.path.join(toy_bindir, bashbin)
- bashbin_txt = read_file(bashbin_path)
- self.assertTrue(bash_shebang_regex.match(bashbin_txt),
- "Pattern '%s' found in %s: %s" % (bash_shebang_regex.pattern, bashbin_path, bashbin_txt))
+ regexes['py'] = re.compile(r'^#!/usr/bin/env python\n# test$')
+ regexes['pl'] = re.compile(r'^#!/usr/bin/env perl\n# test$')
+ regexes['sh'] = re.compile(r'^#!/usr/bin/env bash\n# test$')
+
+ # all scripts should have a shebang that matches their extension
+ scripts = {}
+ scripts['py'] = ['t1.py', 't2.py', 't3.py', 't4.py', 't5.py', 't6.py', 't7.py', 'b1.py']
+ scripts['pl'] = ['t1.pl', 't2.pl', 't3.pl', 't4.pl', 't5.pl', 't6.pl', 't7.pl', 'b1.pl']
+ scripts['sh'] = ['t1.sh', 't2.sh', 't3.sh', 't4.sh', 't5.sh', 'b1.sh', 'b2.sh']
+
+ for ext in ['sh', 'pl', 'py']:
+ for script in scripts[ext]:
+ bin_path = os.path.join(toy_bindir, script)
+ bin_txt = read_file(bin_path)
+ self.assertTrue(regexes[ext].match(bin_txt),
+                                "Pattern '%s' not found in %s: %s" % (regexes[ext].pattern, bin_path, bin_txt))
# now test with a custom env command
extra_args = ['--env-for-shebang=/usr/bin/env -S']
@@ -3023,36 +3050,30 @@ def test_fix_shebang(self):
# bin/toy and bin/toy2 should *not* be patched, since they're binary files
toy_txt = read_file(os.path.join(toy_bindir, 'toy'), mode='rb')
- for fn in ['toy.perl', 'toy.python']:
+ for fn in ['toy.sh', 'toy.perl', 'toy.python']:
fn_txt = read_file(os.path.join(toy_bindir, fn), mode='rb')
# no shebang added
self.assertFalse(fn_txt.startswith(b"#!/"))
# exact same file as original binary (untouched)
self.assertEqual(toy_txt, fn_txt)
+ regexes_S = {}
# no re.M, this should match at start of file!
- py_shebang_regex = re.compile(r'^#!/usr/bin/env -S python\n# test$')
- for pybin in ['t1.py', 't2.py', 't3.py', 't4.py', 't5.py', 't6.py', 't7.py']:
- pybin_path = os.path.join(toy_bindir, pybin)
- pybin_txt = read_file(pybin_path)
- self.assertTrue(py_shebang_regex.match(pybin_txt),
- "Pattern '%s' found in %s: %s" % (py_shebang_regex.pattern, pybin_path, pybin_txt))
-
- # no re.M, this should match at start of file!
- perl_shebang_regex = re.compile(r'^#!/usr/bin/env -S perl\n# test$')
- for perlbin in ['t1.pl', 't2.pl', 't3.pl', 't4.pl', 't5.pl', 't6.pl', 't7.pl']:
- perlbin_path = os.path.join(toy_bindir, perlbin)
- perlbin_txt = read_file(perlbin_path)
- self.assertTrue(perl_shebang_regex.match(perlbin_txt),
- "Pattern '%s' found in %s: %s" % (perl_shebang_regex.pattern, perlbin_path, perlbin_txt))
-
- # There are 2 bash files which shouldn't be influenced by fix_shebang
- bash_shebang_regex = re.compile(r'^#!/usr/bin/env bash\n# test$')
- for bashbin in ['b1.sh', 'b2.sh']:
- bashbin_path = os.path.join(toy_bindir, bashbin)
- bashbin_txt = read_file(bashbin_path)
- self.assertTrue(bash_shebang_regex.match(bashbin_txt),
- "Pattern '%s' found in %s: %s" % (bash_shebang_regex.pattern, bashbin_path, bashbin_txt))
+ regexes_S['py'] = re.compile(r'^#!/usr/bin/env -S python\n# test$')
+ regexes_S['pl'] = re.compile(r'^#!/usr/bin/env -S perl\n# test$')
+ regexes_S['sh'] = re.compile(r'^#!/usr/bin/env -S bash\n# test$')
+
+ for ext in ['sh', 'pl', 'py']:
+ for script in scripts[ext]:
+ bin_path = os.path.join(toy_bindir, script)
+ bin_txt = read_file(bin_path)
+ # the scripts b1.py, b1.pl, b1.sh, b2.sh should keep their original shebang
+ if script.startswith('b'):
+ self.assertTrue(regexes[ext].match(bin_txt),
+                                    "Pattern '%s' not found in %s: %s" % (regexes[ext].pattern, bin_path, bin_txt))
+ else:
+ self.assertTrue(regexes_S[ext].match(bin_txt),
+                                    "Pattern '%s' not found in %s: %s" % (regexes_S[ext].pattern, bin_path, bin_txt))
def test_toy_system_toolchain_alias(self):
"""Test use of 'system' toolchain alias."""
@@ -3491,6 +3512,14 @@ def test_toy_build_sanity_check_linked_libs(self):
self.test_toy_build(ec_file=test_ec, extra_args=args, force=False,
raise_error=True, verbose=False, verify=False)
+ def test_toy_ignore_test_failure(self):
+ """Check whether use of --ignore-test-failure is mentioned in build output."""
+ args = ['--ignore-test-failure']
+ stdout, stderr = self.run_test_toy_build_with_output(extra_args=args, verify=False, testing=False)
+
+ self.assertTrue("Build succeeded (with --ignore-test-failure) for 1 out of 1" in stdout)
+ self.assertFalse(stderr)
+
def suite(loader=None):
""" return all the tests in this file """
diff --git a/test/framework/utilities.py b/test/framework/utilities.py
index ba02d87791..b04eb669bc 100644
--- a/test/framework/utilities.py
+++ b/test/framework/utilities.py
@@ -296,7 +296,13 @@ def eb_main(self, args, do_build=False, return_error=False, logfile=None, verbos
env_before = copy.deepcopy(os.environ)
try:
- main(args=args, logfile=logfile, do_build=do_build, testing=testing, modtool=self.modtool)
+ if '--fetch' in args:
+ # The config sets modules_tool to None if --fetch is specified,
+ # so do the same here to keep the behavior consistent
+ modtool = None
+ else:
+ modtool = self.modtool
+ main(args=args, logfile=logfile, do_build=do_build, testing=testing, modtool=modtool)
except SystemExit as err:
if raise_systemexit:
raise err