diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000000..cef5b1c29c --- /dev/null +++ b/.coveragerc @@ -0,0 +1,26 @@ +# .coveragerc to control coverage.py +[run] +branch = True + +[report] +# Regexes for lines to exclude from consideration +exclude_lines = + # Have to re-enable the standard pragma + pragma: no cover + + # Don't complain about missing debug-only code: + def __repr__ + if self\.debug + + # Don't complain if tests don't hit defensive assertion code: + raise AssertionError + raise NotImplementedError + + # Don't complain if non-runnable code isn't run: + if 0: + if __name__ == .__main__.: + +ignore_errors = True + +[html] +directory = coverage_html_report diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index fffb5264e5..c905a6a342 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -39,7 +39,7 @@ jobs: python: 3.8 fail-fast: false steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - name: set up Python uses: actions/setup-python@v1 @@ -49,6 +49,9 @@ jobs: - name: install OS & Python packages run: | + # disable apt-get update, we don't really need it, + # and it does more harm than good (it's fairly expensive, and it results in flaky test runs) + # sudo apt-get update # for modules tool sudo apt-get install lua5.2 liblua5.2-dev lua-filesystem lua-posix tcl tcl-dev # fix for lua-posix packaging issue, see https://bugs.launchpad.net/ubuntu/+source/lua-posix/+bug/1752082 @@ -153,19 +156,15 @@ jobs: EB_BOOTSTRAP_VERSION=$(grep '^EB_BOOTSTRAP_VERSION' easybuild/scripts/bootstrap_eb.py | sed 's/[^0-9.]//g') EB_BOOTSTRAP_SHA256SUM=$(sha256sum easybuild/scripts/bootstrap_eb.py | cut -f1 -d' ') EB_BOOTSTRAP_FOUND="$EB_BOOTSTRAP_VERSION $EB_BOOTSTRAP_SHA256SUM" - EB_BOOTSTRAP_EXPECTED="20190922.01 7927513e7448d886decfb1bb5daf840e85dc7367f57cc75e51b68f21fe109d53" + EB_BOOTSTRAP_EXPECTED="20200203.01 
616bf3ce812c0844bf9ea3e690f9d88b394ed48f834ddb8424a73cf45fc64ea5" test "$EB_BOOTSTRAP_FOUND" = "$EB_BOOTSTRAP_EXPECTED" || (echo "Version check on bootstrap script failed $EB_BOOTSTRAP_FOUND" && exit 1) - # test bootstrap script (only compatible with Python 2 for now) - if [[ ${{matrix.python}} =~ '2.' ]]; then - export PREFIX=/tmp/$USER/$GITHUB_SHA/eb_bootstrap - python easybuild/scripts/bootstrap_eb.py $PREFIX - # unset $PYTHONPATH to avoid mixing two EasyBuild 'installations' when testing bootstrapped EasyBuild module - unset PYTHONPATH - # simple sanity check on bootstrapped EasyBuild module (skip when testing with Python 3, for now) - module use $PREFIX/modules/all - module load EasyBuild - eb --version - else - echo "Testing of bootstrap script skipped when testing with Python ${{matrix.python}}" - fi + # test bootstrap script + export PREFIX=/tmp/$USER/$GITHUB_SHA/eb_bootstrap + python easybuild/scripts/bootstrap_eb.py $PREFIX + # unset $PYTHONPATH to avoid mixing two EasyBuild 'installations' when testing bootstrapped EasyBuild module + unset PYTHONPATH + # simple sanity check on bootstrapped EasyBuild module + module use $PREFIX/modules/all + module load EasyBuild + eb --version diff --git a/.gitignore b/.gitignore index 31e6aff1ea..c8b95e4482 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +.idea .pydevproject .project LICENSE_HEADER diff --git a/.travis.yml b/.travis.yml index 7a6a0f80da..3b032f5291 100644 --- a/.travis.yml +++ b/.travis.yml @@ -38,6 +38,8 @@ before_install: - pip --version - pip install --upgrade pip - pip --version + # coveralls doesn't support Python 2.6 anymore, so don't try to install it when testing with Python 2.6 + - if [ "x$TRAVIS_PYTHON_VERSION" != 'x2.6' ]; then pip install coveralls; fi - pip install -r requirements.txt # git config is required to make actual git commits (cfr. tests for GitRepository) - git config --global user.name "Travis CI" @@ -55,14 +57,18 @@ script: - if [ ! -z $MOD_INIT ] && [ ! 
-z $LMOD_VERSION ]; then alias ml=foobar; fi # set up environment for modules tool (if $MOD_INIT is defined) - if [ ! -z $MOD_INIT ]; then source $MOD_INIT; type module; fi - # install GitHub token + # install GitHub token; + # unset $GITHUB_TOKEN environment variable after installing token, + # to avoid that it is included in environment dump that is included in EasyBuild debug logs, + # which causes test_from_pr_token_log to fail... - if [ ! -z $GITHUB_TOKEN ]; then if [ "x$TRAVIS_PYTHON_VERSION" == 'x2.6' ]; then SET_KEYRING="keyring.set_keyring(keyring.backends.file.PlaintextKeyring())"; else SET_KEYRING="import keyrings; keyring.set_keyring(keyrings.alt.file.PlaintextKeyring())"; fi; python -c "import keyring; $SET_KEYRING; keyring.set_password('github_token', 'easybuild_test', '$GITHUB_TOKEN')"; - fi + fi; + unset GITHUB_TOKEN; - if [ ! -z $TEST_EASYBUILD_MODULES_TOOL ]; then export EASYBUILD_MODULES_TOOL=$TEST_EASYBUILD_MODULES_TOOL; fi - if [ ! -z $TEST_EASYBUILD_MODULE_SYNTAX ]; then export EASYBUILD_MODULE_SYNTAX=$TEST_EASYBUILD_MODULE_SYNTAX; fi # create 'source distribution' tarball, like we do when publishing a release to PyPI @@ -88,8 +94,10 @@ script: # create file owned by root but writable by anyone (used by test_copy_file) - sudo touch /tmp/file_to_overwrite_for_easybuild_test_copy_file.txt - sudo chmod o+w /tmp/file_to_overwrite_for_easybuild_test_copy_file.txt - # run test suite - - python -O -m test.framework.suite 2>&1 | tee test_framework_suite.log + # run coverage on all travis builds except for Python 2.6 + - if [ "x$TRAVIS_PYTHON_VERSION" != 'x2.6' ]; then coverage run -m test.framework.suite 2>&1 | tee test_framework_suite.log; coverage report -m --ignore-errors; fi + # invoke the regression test for Python 2.6 the original way without coverage + - if [ "x$TRAVIS_PYTHON_VERSION" == 'x2.6' ]; then python -O -m test.framework.suite 2>&1 | tee test_framework_suite.log; fi # try and make sure output of running tests is clean (no printed 
messages/warnings) - IGNORE_PATTERNS="no GitHub token available|skipping SvnRepository test|lib/python2.6/site-packages|requires Lmod as modules tool" # '|| true' is needed to avoid that Travis stops the job on non-zero exit of grep (i.e. when there are no matches) @@ -100,12 +108,15 @@ script: - EB_BOOTSTRAP_VERSION=$(grep '^EB_BOOTSTRAP_VERSION' $TRAVIS_BUILD_DIR/easybuild/scripts/bootstrap_eb.py | sed 's/[^0-9.]//g') - EB_BOOTSTRAP_SHA256SUM=$(sha256sum $TRAVIS_BUILD_DIR/easybuild/scripts/bootstrap_eb.py | cut -f1 -d' ') - EB_BOOTSTRAP_FOUND="$EB_BOOTSTRAP_VERSION $EB_BOOTSTRAP_SHA256SUM" - - EB_BOOTSTRAP_EXPECTED="20190922.01 7927513e7448d886decfb1bb5daf840e85dc7367f57cc75e51b68f21fe109d53" + - EB_BOOTSTRAP_EXPECTED="20200203.01 616bf3ce812c0844bf9ea3e690f9d88b394ed48f834ddb8424a73cf45fc64ea5" - test "$EB_BOOTSTRAP_FOUND" = "$EB_BOOTSTRAP_EXPECTED" || (echo "Version check on bootstrap script failed $EB_BOOTSTRAP_FOUND" && exit 1) - # test bootstrap script (skip when testing with Python 3 for now, since latest EasyBuild release is not compatible with Python 3 yet) - - if [ ! "x$TRAVIS_PYTHON_VERSION" =~ x3.[0-9] ]; then python $TRAVIS_BUILD_DIR/easybuild/scripts/bootstrap_eb.py /tmp/$TRAVIS_JOB_ID/eb_bootstrap; fi + # test bootstrap script + - python $TRAVIS_BUILD_DIR/easybuild/scripts/bootstrap_eb.py /tmp/$TRAVIS_JOB_ID/eb_bootstrap # unset $PYTHONPATH to avoid mixing two EasyBuild 'installations' when testing bootstrapped EasyBuild module - unset PYTHONPATH - # simply sanity check on bootstrapped EasyBuild module (skip when testing with Python 3, for now) - - if [ ! "x$TRAVIS_PYTHON_VERSION" =~ x3.[0-9] ]; then module use /tmp/$TRAVIS_JOB_ID/eb_bootstrap/modules/all; fi - - if [ ! 
"x$TRAVIS_PYTHON_VERSION" =~ x3.[0-9] ]; then module load EasyBuild; eb --version; fi + # simply sanity check on bootstrapped EasyBuild module + - module use /tmp/$TRAVIS_JOB_ID/eb_bootstrap/modules/all + - module load EasyBuild; eb --version +after_success: + - if [ "x$TRAVIS_PYTHON_VERSION" != 'x2.6' ]; then coveralls; fi + diff --git a/RELEASE_NOTES b/RELEASE_NOTES index 8239f714bc..1b71f89630 100644 --- a/RELEASE_NOTES +++ b/RELEASE_NOTES @@ -3,6 +3,69 @@ For more detailed information, please see the git log. These release notes can also be consulted at https://easybuild.readthedocs.io/en/latest/Release_notes.html. +v4.2.0 (April 14th 2020) +------------------------ + +feature release + +- add support for --try-update-deps (experimental feature), to upgrade dependencies based on available easyconfigs (#2599) +- adding locking to prevent two parallel builds of the same installation directory (#3009) + - for more information, see https://easybuild.readthedocs.io/en/latest/Locks.html +- significantly speed up -D/--dry-run by avoiding useless 'module show' (#3203) +- add support for creating an index & using it when searching for easyconfigs (#3210) + - for more information, see https://easybuild.readthedocs.io/en/latest/Easyconfigs_index.html +- additional GitHub integration features: + - add support for targeting easyblocks and framework repositories in --new-pr (#1876, #3189) + - add support for --include-easyblocks-from-pr (#3206) + - for more information, https://easybuild.readthedocs.io/en/latest/Integration_with_GitHub.html +- various other enhancements, including: + - add a contrib/hooks dir with some examples of hooks used (#2777) + - also mention working directory + input passed via stdin (if any) in trace output of run_cmd (#3168) + - probe external modules for missing metadata that is not provided via extermal module metadata file (#3174) + - also update $CMAKE_PREFIX_PATH and $CMAKE_LIBRARY_PATH in generated module file (#3176) + - optionally call 
log.warning in print_warning (#3195) + - add an option to git_config to retain the .git directory (#3197) + - allow use of SYSTEM as --try-toolchain option (#3213) + - mention CPU arch name in comment for uploaded test report, if it's known by archspec (#3227) + - make --merge-pr take into account --pr-target-branch (#3236) + - make --check-contrib print a warning when None is used for checksums (#3244) + - update install-EasyBuild-develop.sh script and create script for 2020a merge sprint (#3245) + - add template for mpi_cmd_prefix (#3264) + - update copy_dir to include option to merge directories (#3270) + - support template name for CUDA version (#3274) +- various bug fixes, including: + - use correct module for errors_found_in_log (#3119) + - fix EasyConfig.update code to handle both strings and lists as input (#3170) + - fix removing temporary branch on --check-github (#3182) + - fix shebang even if first line doesn't start with '#!' (#3183) + - make boostrap script work with Python 3 (#3186) + - read patch files as bytestring to avoid UnicodeDecodeError for patches that include funky characters (#3191) + - fix online check in --check-github: try repeatedly and with different URLs to cater for HTTP issues (#3194) + - don't crash when GitPython is not installed when using Python 3 (#3198) + - fix os_name_map for RHEL8 (#3201) + - don't add shebang to binary files (#3208) + - use checkout@v2 in GitHub Actions to fix broken re-triggered tests (#3219) + - don't filter out None values in to_checksums, leave them in place (#3225) + - fix defining of $MPI_INC_* and $MPI_LIB_* environment variables for impi version 2019 and later (#3229) + - use correct target account/repo when creating test report & posting comment in PR (#3234) + - reorganize EasyBlock.extensions_step to ensure correct filtering of extensions (#3235) + - also support %(installdir)s and %(builddir)s templates for extensions (#3237) + - unset $GITHUB_TOKEN in Travis after installing token, to avoid 
failing test_from_pr_token_log (#3252) + - fix reporting when skipping extensions (#3254) + - avoid API rate limit errors on online check in --check-github (#3257) + - show easyconfig filenames for parallel build (#3258) + - make EasyConfig.dump aware of toolchain hierarchy, to avoid hardcoded subtoolchains in dependencies easyconfig parameters (#3261) + - fix use of --copy-ec with a single argument, assume copy to current working directory (#3262) + - fix posting of comment in PR with --upload-test-report (#3272) + - take into account dependencies marked as external modules when composing template values like %(pyver)s (#3273) +- other changes: + - increase timeout & use api.github.com for connectivity check in check_github (#3192) + - restore flake8 default ignores (#3193) + - enable tracking of test suite coverage using coveralls (#3204) + - make tests use easybuilders/testrepository rather than hpcugent/testrepository after it was moved (#3238) + - improve raised error in remove_dir and deprecate rmtree2 (#3228) + + v4.1.2 (March 16th 2020) ------------------------ @@ -11,6 +74,8 @@ bugfix release - fix gitdb dependency on Python 2.6 in test configuration (#3212) - fix broken test for --review-pr by using different PR to test with (#3226) - censor authorization part of headers before logging ReST API request (#3248) + - see also https://github.com/easybuilders/easybuild-framework/security/advisories/GHSA-2wx6-wc87-rmjm + v4.1.1 (January 16th 2020) -------------------------- diff --git a/contrib/hooks/README.rst b/contrib/hooks/README.rst new file mode 100644 index 0000000000..626d403ca7 --- /dev/null +++ b/contrib/hooks/README.rst @@ -0,0 +1,15 @@ +Example implementations of EasyBuild hooks +================================= + +.. 
image:: https://easybuilders.github.io/easybuild/images/easybuild_logo_small.png + :align: center + +EasyBuild website: https://easybuilders.github.io/easybuild/ +docs: https://easybuild.readthedocs.io + +This directory contain examples of implementations of EasyBuild hooks +used at various sites, along with a couple of small examples with +explanations. + +See https://easybuild.readthedocs.io/en/latest/Hooks.html for +documentation on hooks in EasyBuild. diff --git a/contrib/hooks/add_delete_configopt.py b/contrib/hooks/add_delete_configopt.py new file mode 100644 index 0000000000..b349bcbdee --- /dev/null +++ b/contrib/hooks/add_delete_configopt.py @@ -0,0 +1,32 @@ +# Small example of how to add/delete a configure option. +# +# Author: Åke Sandgren, HPC2N + +# We need to be able to distinguish between versions of OpenMPI +from distutils.version import LooseVersion + + +def pre_configure_hook(self, *args, **kwargs): + # Check that we're dealing with the correct easyconfig file + if self.name == 'OpenMPI': + extra_opts = "" + # Enable using pmi from slurm + extra_opts += "--with-pmi=/lap/slurm " + + # And enable munge for OpenMPI versions that knows about it + if LooseVersion(self.version) >= LooseVersion('2'): + extra_opts += "--with-munge " + + # Now add the options + self.log.info("[pre-configure hook] Adding %s" % extra_opts) + self.cfg.update('configopts', extra_opts) + + # Now we delete some options + # For newer versions of OpenMPI we can re-enable ucx, i.e. 
delete the --without-ucx flag + if LooseVersion(self.version) >= LooseVersion('2.1'): + self.log.info("[pre-configure hook] Re-enabling ucx") + self.cfg['configopts'] = self.cfg['configopts'].replace('--without-ucx', ' ') + + # And we can remove the --disable-dlopen option from the easyconfig file + self.log.info("[pre-configure hook] Re-enabling dlopen") + self.cfg['configopts'] = self.cfg['configopts'].replace('--disable-dlopen', ' ') diff --git a/contrib/hooks/hpc2n_hooks.py b/contrib/hooks/hpc2n_hooks.py new file mode 100644 index 0000000000..ad93fe88e1 --- /dev/null +++ b/contrib/hooks/hpc2n_hooks.py @@ -0,0 +1,213 @@ +# Hooks for HPC2N site changes. +# +# Author: Ake Sandgren, HPC2N + +import os + +from distutils.version import LooseVersion +from easybuild.framework.easyconfig.format.format import DEPENDENCY_PARAMETERS +from easybuild.tools.filetools import apply_regex_substitutions +from easybuild.tools.build_log import EasyBuildError +from easybuild.tools.modules import get_software_root +from easybuild.tools.systemtools import get_shared_lib_ext + +# Add/remove dependencies and/or patches +# Access to the raw values before templating and such. 
+def parse_hook(ec, *args, **kwargs): + + # Internal helper function + def add_extra_dependencies(ec, dep_type, extra_deps): + """dep_type: must be in DEPENDENCY_PARAMETERS or 'osdependencies'""" + ec.log.info("[parse hook] Adding %s: %s" % (dep_type, extra_deps)) + + if dep_type in DEPENDENCY_PARAMETERS: + for dep in extra_deps: + ec[dep_type].append(dep) + elif dep_type == 'osdependencies': + if isinstance(extra_deps, tuple): + ec[dep_type].append(extra_deps) + else: + raise EasyBuildError("parse_hook: Type of extra_deps argument (%s), for 'osdependencies' must be " + "tuple, found %s" % (extra_deps, type(extra_deps))) + else: + raise EasyBuildError("parse_hook: Incorrect dependency type in add_extra_dependencies: %s" % dep_type) + + extra_deps = [] + + if ec.name == 'OpenMPI': + if LooseVersion(ec.version) >= LooseVersion('2') and LooseVersion(ec.version) < LooseVersion('2.1.2'): + ec.log.info("[parse hook] Adding pmi and lustre patches") + if LooseVersion(ec.version) < LooseVersion('2.1.1'): + ec['patches'].append('OpenMPI-2.0.0_fix_bad-include_of_pmi_h.patch') + + if LooseVersion(ec.version) < LooseVersion('2.0.2'): + ec['patches'].append('OpenMPI-2.0.1_fix_lustre.patch') + elif LooseVersion(ec.version) < LooseVersion('2.1'): + ec['patches'].append('OpenMPI-2.0.2_fix_lustre.patch') + elif LooseVersion(ec.version) < LooseVersion('2.1.1'): + ec['patches'].append('OpenMPI-2.1.0_fix_lustre.patch') + else: + ec['patches'].append('OpenMPI-2.1.1_fix_lustre.patch') + + if LooseVersion(ec.version) == LooseVersion('4.0.0'): + ec['patches'].append('OpenMPI-4.0.0_fix_configure_bug.patch') + + if LooseVersion(ec.version) >= LooseVersion('2.1'): + pmix_version = '1.2.5' + ucx_version = '1.4.0' + if LooseVersion(ec.version) >= LooseVersion('3'): + pmix_version = '2.2.1' + if LooseVersion(ec.version) >= LooseVersion('4'): + pmix_version = '3.0.2' # OpenMPI 4.0.0 is not compatible with PMIx 3.1.x + + extra_deps.append(('PMIx', pmix_version)) + # Use of external PMIx requires 
external libevent + # But PMIx already has it as a dependency so we don't need + # to explicitly set it. + + extra_deps.append(('UCX', ucx_version)) + + if ec.name == 'impi': + pmix_version = '3.1.1' + extra_deps.append(('PMIx', pmix_version)) + + if extra_deps: + add_extra_dependencies(ec, 'dependencies', extra_deps) + + +def pre_configure_hook(self, *args, **kwargs): + if self.name == 'GROMACS': + # HPC2N always uses -DGMX_USE_NVML=ON on GPU builds + if get_software_root('CUDA'): + self.log.info("[pre-configure hook] Adding -DGMX_USE_NVML=ON") + self.cfg.update('configopts', "-DGMX_USE_NVML=ON ") + + if self.name == 'OpenMPI': + extra_opts = "" + # Old versions don't work with PMIx, use slurms PMI1 + if LooseVersion(self.version) < LooseVersion('2.1'): + extra_opts += "--with-pmi=/lap/slurm " + if LooseVersion(self.version) >= LooseVersion('2'): + extra_opts += "--with-munge " + + # Using PMIx dependency in easyconfig, see above + if LooseVersion(self.version) >= LooseVersion('2.1'): + if get_software_root('PMIx'): + extra_opts += "--with-pmix=$EBROOTPMIX " + # Use of external PMIx requires external libevent + # We're using the libevent that comes from the PMIx dependency + if get_software_root('libevent'): + extra_opts += "--with-libevent=$EBROOTLIBEVENT " + else: + raise EasyBuildError("Error in pre_configure_hook for OpenMPI: External use of PMIx requires " + "external libevent, which was not found. " + "Check parse_hook for dependency settings.") + else: + raise EasyBuildError("Error in pre_configure_hook for OpenMPI: PMIx not defined in dependencies. " + "Check parse_hook for dependency settings.") + + if get_software_root('UCX'): + extra_opts += "--with-ucx=$EBROOTUCX " + + if LooseVersion(self.version) >= LooseVersion('2'): + extra_opts += "--with-cma " + extra_opts += "--with-lustre " + + # We still need to fix the knem package to install its + # pkg-config .pc file correctly, and we need a more generic + # install dir. 
+ # extra_opts += "--with-knem=/opt/knem-1.1.2.90mlnx1 " + + self.log.info("[pre-configure hook] Adding %s" % extra_opts) + self.cfg.update('configopts', extra_opts) + + if LooseVersion(self.version) >= LooseVersion('2.1'): + self.log.info("[pre-configure hook] Re-enabling ucx") + self.cfg['configopts'] = self.cfg['configopts'].replace('--without-ucx', ' ') + + self.log.info("[pre-configure hook] Re-enabling dlopen") + self.cfg['configopts'] = self.cfg['configopts'].replace('--disable-dlopen', ' ') + + if self.name == 'PMIx': + self.log.info("[pre-configure hook] Adding --with-munge") + self.cfg.update('configopts', "--with-munge ") + if LooseVersion(self.version) >= LooseVersion('2'): + self.log.info("[pre-configure hook] Adding --with-tests-examples") + self.cfg.update('configopts', "--with-tests-examples ") + self.log.info("[pre-configure hook] Adding --disable-per-user-config-files") + self.cfg.update('configopts', "--disable-per-user-config-files") + + +def pre_build_hook(self, *args, **kwargs): + if self.name == 'pyslurm': + self.log.info("[pre-build hook] Adding --slurm=/lap/slurm") + self.cfg.update('buildopts', "--slurm=/lap/slurm ") + + +def post_install_hook(self, *args, **kwargs): + if self.name == 'impi': + # Fix mpirun from IntelMPI to explicitly unset I_MPI_PMI_LIBRARY + # it can only be used with srun. + self.log.info("[post-install hook] Unset I_MPI_PMI_LIBRARY in mpirun") + apply_regex_substitutions(os.path.join(self.installdir, "intel64", "bin", "mpirun"), [ + (r'^(#!/bin/sh.*)$', r'\1\nunset I_MPI_PMI_LIBRARY'), + ]) + + +def pre_module_hook(self, *args, **kwargs): + if self.name == 'impi': + # Add I_MPI_PMI_LIBRARY to module for IntelMPI so it works with + # srun. + self.log.info("[pre-module hook] Set I_MPI_PMI_LIBRARY in impi module") + # Must be done this way, updating self.cfg['modextravars'] + # directly doesn't work due to templating. 
+ en_templ = self.cfg.enable_templating + self.cfg.enable_templating = False + shlib_ext = get_shared_lib_ext() + pmix_root = get_software_root('PMIx') + if pmix_root: + mpi_type = 'pmix_v3' + self.cfg['modextravars'].update({ + 'I_MPI_PMI_LIBRARY': os.path.join(pmix_root, "lib", "libpmi." + shlib_ext) + }) + self.cfg['modextravars'].update({'SLURM_MPI_TYPE': mpi_type}) + # Unfortunately UCX doesn't yet work for unknown reasons. Make sure it is off. + self.cfg['modextravars'].update({'SLURM_PMIX_DIRECT_CONN_UCX': 'false'}) + else: + self.cfg['modextravars'].update({'I_MPI_PMI_LIBRARY': "/lap/slurm/lib/libpmi.so"}) + self.cfg.enable_templating = en_templ + + if self.name == 'OpenBLAS': + self.log.info("[pre-module hook] Set OMP_NUM_THREADS=1 in OpenBLAS module") + self.cfg.update('modluafooter', 'if ((mode() == "load" and os.getenv("OMP_NUM_THREADS") == nil) ' + 'or (mode() == "unload" and os.getenv("__OpenBLAS_set_OMP_NUM_THREADS") == "1")) then ' + 'setenv("OMP_NUM_THREADS","1"); setenv("__OpenBLAS_set_OMP_NUM_THREADS", "1") end') + + if self.name == 'OpenMPI': + if LooseVersion(self.version) < LooseVersion('2.1'): + mpi_type = 'openmpi' + elif LooseVersion(self.version) < LooseVersion('3'): + mpi_type = 'pmix_v1' + elif LooseVersion(self.version) < LooseVersion('4'): + mpi_type = 'pmix_v2' + else: + mpi_type = 'pmix_v3' + + self.log.info("[pre-module hook] Set SLURM_MPI_TYPE=%s in OpenMPI module" % mpi_type) + # Must be done this way, updating self.cfg['modextravars'] + # directly doesn't work due to templating. + en_templ = self.cfg.enable_templating + self.cfg.enable_templating = False + self.cfg['modextravars'].update({'SLURM_MPI_TYPE': mpi_type}) + # Unfortunately UCX doesn't yet work for unknown reasons. Make sure it is off. 
+ self.cfg['modextravars'].update({'SLURM_PMIX_DIRECT_CONN_UCX': 'false'}) + self.cfg.enable_templating = en_templ + + if self.name == 'PMIx': + # This is a, hopefully, temporary workaround for https://github.com/pmix/pmix/issues/1114 + if LooseVersion(self.version) > LooseVersion('2') and LooseVersion(self.version) < LooseVersion('3'): + self.log.info("[pre-module hook] Set PMIX_MCA_gds=^ds21 in PMIx module") + en_templ = self.cfg.enable_templating + self.cfg.enable_templating = False + self.cfg['modextravars'].update({'PMIX_MCA_gds': '^ds21'}) + self.cfg.enable_templating = en_templ diff --git a/easybuild/framework/easyblock.py b/easybuild/framework/easyblock.py index 300f474217..1aba3187d2 100644 --- a/easybuild/framework/easyblock.py +++ b/easybuild/framework/easyblock.py @@ -62,7 +62,7 @@ from easybuild.framework.easyconfig.tools import get_paths_for from easybuild.framework.easyconfig.templates import TEMPLATE_NAMES_EASYBLOCK_RUN_STEP, template_constant_dict from easybuild.framework.extension import resolve_exts_filter_template -from easybuild.tools import config, filetools +from easybuild.tools import config, run from easybuild.tools.build_details import get_build_stats from easybuild.tools.build_log import EasyBuildError, dry_run_msg, dry_run_warning, dry_run_set_dirs from easybuild.tools.build_log import print_error, print_msg, print_warning @@ -75,8 +75,8 @@ from easybuild.tools.filetools import change_dir, convert_name, compute_checksum, copy_file, derive_alt_pypi_url from easybuild.tools.filetools import diff_files, download_file, encode_class_name, extract_file from easybuild.tools.filetools import find_backup_name_candidate, get_source_tarball_from_git, is_alt_pypi_url -from easybuild.tools.filetools import is_sha256_checksum, mkdir, move_file, move_logs, read_file, remove_dir -from easybuild.tools.filetools import remove_file, rmtree2, verify_checksum, weld_paths, write_file, dir_contains_files +from easybuild.tools.filetools import is_binary, 
is_sha256_checksum, mkdir, move_file, move_logs, read_file, remove_dir +from easybuild.tools.filetools import remove_file, verify_checksum, weld_paths, write_file, dir_contains_files from easybuild.tools.hooks import BUILD_STEP, CLEANUP_STEP, CONFIGURE_STEP, EXTENSIONS_STEP, FETCH_STEP, INSTALL_STEP from easybuild.tools.hooks import MODULE_STEP, PACKAGE_STEP, PATCH_STEP, PERMISSIONS_STEP, POSTITER_STEP, POSTPROC_STEP from easybuild.tools.hooks import PREPARE_STEP, READY_STEP, SANITYCHECK_STEP, SOURCE_STEP, TEST_STEP, TESTCASES_STEP @@ -1282,47 +1282,69 @@ def make_module_req(self): lines = ['\n'] if os.path.isdir(self.installdir): - change_dir(self.installdir) + old_dir = change_dir(self.installdir) + else: + old_dir = None + if self.dry_run: + self.dry_run_msg("List of paths that would be searched and added to module file:\n") + note = "note: glob patterns are not expanded and existence checks " + note += "for paths are skipped for the statements below due to dry run" + lines.append(self.module_generator.comment(note)) + + # for these environment variables, the corresponding subdirectory must include at least one file + keys_requiring_files = set(('PATH', 'LD_LIBRARY_PATH', 'LIBRARY_PATH', 'CPATH', + 'CMAKE_PREFIX_PATH', 'CMAKE_LIBRARY_PATH')) + + for key, reqs in sorted(requirements.items()): + if isinstance(reqs, string_type): + self.log.warning("Hoisting string value %s into a list before iterating over it", reqs) + reqs = [reqs] if self.dry_run: - self.dry_run_msg("List of paths that would be searched and added to module file:\n") - note = "note: glob patterns are not expanded and existence checks " - note += "for paths are skipped for the statements below due to dry run" - lines.append(self.module_generator.comment(note)) - - # for these environment variables, the corresponding subdirectory must include at least one file - keys_requiring_files = ('CPATH', 'LD_LIBRARY_PATH', 'LIBRARY_PATH', 'PATH') - - for key in sorted(requirements): - if self.dry_run: - 
self.dry_run_msg(" $%s: %s" % (key, ', '.join(requirements[key]))) - reqs = requirements[key] - if isinstance(reqs, string_type): - self.log.warning("Hoisting string value %s into a list before iterating over it", reqs) - reqs = [reqs] - - for path in reqs: - # only use glob if the string is non-empty - if path and not self.dry_run: - paths = sorted(glob.glob(path)) - if paths and key in keys_requiring_files: - # only retain paths that contain at least one file - retained_paths = [ - path for path in paths - if os.path.isdir(os.path.join(self.installdir, path)) - and dir_contains_files(os.path.join(self.installdir, path)) - ] - self.log.info("Only retaining paths for %s that contain at least one file: %s -> %s", - key, paths, retained_paths) - paths = retained_paths - else: - # empty string is a valid value here (i.e. to prepend the installation prefix, cfr $CUDA_HOME) - paths = [path] + self.dry_run_msg(" $%s: %s" % (key, ', '.join(reqs))) + # Don't expand globs or do any filtering below for dry run + paths = sorted(reqs) + else: + # Expand globs but only if the string is non-empty + # empty string is a valid value here (i.e. 
to prepend the installation prefix, cfr $CUDA_HOME) + paths = sorted(sum((glob.glob(path) if path else [path] for path in reqs), [])) # sum flattens to list + + # If lib64 is just a symlink to lib we fixup the paths to avoid duplicates + lib64_is_symlink = (all(os.path.isdir(path) for path in ['lib', 'lib64']) + and os.path.samefile('lib', 'lib64')) + if lib64_is_symlink: + fixed_paths = [] + for path in paths: + if (path + os.path.sep).startswith('lib64' + os.path.sep): + # We only need CMAKE_LIBRARY_PATH if there is a separate lib64 path, so skip symlink + if key == 'CMAKE_LIBRARY_PATH': + continue + path = path.replace('lib64', 'lib', 1) + fixed_paths.append(path) + if fixed_paths != paths: + self.log.info("Fixed symlink lib64 in paths for %s: %s -> %s", key, paths, fixed_paths) + paths = fixed_paths + # Use a set to remove duplicates, e.g. by having lib64 and lib which get fixed to lib and lib above + paths = sorted(set(paths)) + if key in keys_requiring_files: + # only retain paths that contain at least one file + retained_paths = [ + path for path in paths + if os.path.isdir(os.path.join(self.installdir, path)) + and dir_contains_files(os.path.join(self.installdir, path)) + ] + if retained_paths != paths: + self.log.info("Only retaining paths for %s that contain at least one file: %s -> %s", + key, paths, retained_paths) + paths = retained_paths + + if paths: + lines.append(self.module_generator.prepend_paths(key, paths)) + if self.dry_run: + self.dry_run_msg('') - lines.append(self.module_generator.prepend_paths(key, paths)) - if self.dry_run: - self.dry_run_msg('') - change_dir(self.orig_workdir) + if old_dir is not None: + change_dir(old_dir) return ''.join(lines) @@ -1342,6 +1364,8 @@ def make_module_req_guess(self): 'CLASSPATH': ['*.jar'], 'XDG_DATA_DIRS': ['share'], 'GI_TYPELIB_PATH': [os.path.join(x, 'girepository-*') for x in lib_paths], + 'CMAKE_PREFIX_PATH': [''], + 'CMAKE_LIBRARY_PATH': ['lib64'], # lib and lib32 are searched through the above } 
def load_module(self, mod_paths=None, purge=True, extra_modules=None): @@ -1413,7 +1437,7 @@ def clean_up_fake_module(self, fake_mod_data): try: self.modules_tool.unload([self.short_mod_name]) self.modules_tool.remove_module_path(os.path.join(fake_mod_path, self.mod_subdir)) - rmtree2(os.path.dirname(fake_mod_path)) + remove_dir(os.path.dirname(fake_mod_path)) except OSError as err: raise EasyBuildError("Failed to clean up fake module dir %s: %s", fake_mod_path, err) elif self.short_mod_name is None: @@ -1439,7 +1463,7 @@ def prepare_for_extensions(self): def skip_extensions(self): """ Called when self.skip is True - - use this to detect existing extensions and to remove them from self.exts + - use this to detect existing extensions and to remove them from self.ext_instances - based on initial R version """ # obtaining untemplated reference value is required here to support legacy string templates like name/version @@ -1449,17 +1473,18 @@ def skip_extensions(self): raise EasyBuildError("Skipping of extensions, but no exts_filter set in easyconfig") res = [] - for ext in self.exts: - cmd, stdin = resolve_exts_filter_template(exts_filter, ext) + for ext_inst in self.ext_instances: + cmd, stdin = resolve_exts_filter_template(exts_filter, ext_inst) (cmdstdouterr, ec) = run_cmd(cmd, log_all=False, log_ok=False, simple=False, inp=stdin, regexp=False) self.log.info("exts_filter result %s %s", cmdstdouterr, ec) if ec: - self.log.info("Not skipping %s" % ext['name']) - self.log.debug("exit code: %s, stdout/err: %s" % (ec, cmdstdouterr)) - res.append(ext) + self.log.info("Not skipping %s", ext_inst.name) + self.log.debug("exit code: %s, stdout/err: %s", ec, cmdstdouterr) + res.append(ext_inst) else: - self.log.info("Skipping %s" % ext['name']) - self.exts = res + print_msg("skipping extension %s" % ext_inst.name, silent=self.silent, log=self.log) + + self.ext_instances = res # # MISCELLANEOUS UTILITY FUNCTIONS @@ -1844,9 +1869,14 @@ def check_checksums_for(self, ent, sub='', 
source_cnt=None): else: valid_checksums = (checksum,) - if not all(is_sha256_checksum(c) for c in valid_checksums): - msg = "Non-SHA256 checksum(s) found for %s: %s" % (fn, valid_checksums) - checksum_issues.append(msg) + non_sha256_checksums = [c for c in valid_checksums if not is_sha256_checksum(c)] + if non_sha256_checksums: + if all(c is None for c in non_sha256_checksums): + print_warning("Found %d None checksum value(s), please make sure this is intended!" % + len(non_sha256_checksums)) + else: + msg = "Non-SHA256 checksum(s) found for %s: %s" % (fn, valid_checksums) + checksum_issues.append(msg) return checksum_issues @@ -2053,9 +2083,6 @@ def extensions_step(self, fetch=False): self.exts_all = self.exts[:] # retain a copy of all extensions, regardless of filtering/skipping - if self.skip: - self.skip_extensions() - # actually install extensions self.log.debug("Installing extensions") exts_defaultclass = self.cfg['exts_defaultclass'] @@ -2076,14 +2103,8 @@ def extensions_step(self, fetch=False): # get class instances for all extensions self.ext_instances = [] - exts_cnt = len(self.exts) - for idx, ext in enumerate(self.exts): - self.log.debug("Starting extension %s" % ext['name']) - tup = (ext['name'], ext.get('version', ''), idx+1, exts_cnt) - print_msg("installing extension %s %s (%d/%d)..." 
% tup, silent=self.silent) - - # always go back to original work dir to avoid running stuff from a dir that no longer exists - change_dir(self.orig_workdir) + for ext in self.exts: + self.log.debug("Creating class instance for extension %s...", ext['name']) cls, inst = None, None class_name = encode_class_name(ext['name']) @@ -2095,11 +2116,11 @@ def extensions_step(self, fetch=False): # with a similar name (e.g., Perl Extension 'GO' vs 'Go' for which 'EB_Go' is available) cls = get_easyblock_class(None, name=ext['name'], error_on_failed_import=False, error_on_missing_easyblock=False) - self.log.debug("Obtained class %s for extension %s" % (cls, ext['name'])) + self.log.debug("Obtained class %s for extension %s", cls, ext['name']) if cls is not None: inst = cls(self, ext) except (ImportError, NameError) as err: - self.log.debug("Failed to use extension-specific class for extension %s: %s" % (ext['name'], err)) + self.log.debug("Failed to use extension-specific class for extension %s: %s", ext['name'], err) # alternative attempt: use class specified in class map (if any) if inst is None and ext['name'] in exts_classmap: @@ -2117,7 +2138,7 @@ def extensions_step(self, fetch=False): if inst is None: try: cls = get_class_for(default_class_modpath, default_class) - self.log.debug("Obtained class %s for installing extension %s" % (cls, ext['name'])) + self.log.debug("Obtained class %s for installing extension %s", cls, ext['name']) inst = cls(self, ext) self.log.debug("Installing extension %s with default class %s (from %s)", ext['name'], default_class, default_class_modpath) @@ -2125,10 +2146,26 @@ def extensions_step(self, fetch=False): raise EasyBuildError("Also failed to use default class %s from %s for extension %s: %s, giving up", default_class, default_class_modpath, ext['name'], err) else: - self.log.debug("Installing extension %s with class %s (from %s)" % (ext['name'], class_name, mod_path)) + self.log.debug("Installing extension %s with class %s (from %s)", 
ext['name'], class_name, mod_path) + + self.ext_instances.append(inst) + + if self.skip: + self.skip_extensions() + + exts_cnt = len(self.ext_instances) + for idx, ext in enumerate(self.ext_instances): + + self.log.debug("Starting extension %s" % ext.name) + + # always go back to original work dir to avoid running stuff from a dir that no longer exists + change_dir(self.orig_workdir) + + tup = (ext.name, ext.version or '', idx+1, exts_cnt) + print_msg("installing extension %s %s (%d/%d)..." % tup, silent=self.silent) if self.dry_run: - tup = (ext['name'], ext.get('version', ''), cls.__name__) + tup = (ext.name, ext.version, cls.__name__) msg = "\n* installing extension %s %s using '%s' easyblock\n" % tup self.dry_run_msg(msg) @@ -2141,18 +2178,15 @@ def extensions_step(self, fetch=False): else: # don't reload modules for toolchain, there is no need since they will be loaded already; # the (fake) module for the parent software gets loaded before installing extensions - inst.toolchain.prepare(onlymod=self.cfg['onlytcmod'], silent=True, loadmod=False, - rpath_filter_dirs=self.rpath_filter_dirs) + ext.toolchain.prepare(onlymod=self.cfg['onlytcmod'], silent=True, loadmod=False, + rpath_filter_dirs=self.rpath_filter_dirs) # real work - inst.prerun() - txt = inst.run() + ext.prerun() + txt = ext.run() if txt: self.module_extra_extensions += txt - inst.postrun() - - # append so we can make us of it later (in sanity_check_step) - self.ext_instances.append(inst) + ext.postrun() # cleanup (unload fake module, remove fake module dir) if fake_mod_data: @@ -2199,17 +2233,26 @@ def fix_shebang(self): lang, shebang, glob_pattern, paths) for path in paths: # check whether file should be patched by checking whether it has a shebang we want to tweak; - # this also helps to skip binary files we may be hitting + # this also helps to skip binary files we may be hitting (but only with Python 3) try: contents = read_file(path, mode='r') should_patch = shebang_regex.match(contents) except 
(TypeError, UnicodeDecodeError): should_patch = False + contents = None + # if an existing shebang is found, patch it if should_patch: contents = shebang_regex.sub(shebang, contents) write_file(path, contents) + # if no shebang is present at all, add one (but only for non-binary files!) + elif contents is not None and not is_binary(contents) and not contents.startswith('#!'): + self.log.info("The file '%s' doesn't have any shebang present, inserting it as first line.", + path) + contents = shebang + '\n' + contents + write_file(path, contents) + def post_install_step(self): """ Do some postprocessing @@ -2628,7 +2671,7 @@ def cleanup_step(self): self.log.info("Cleaning up builddir %s (in %s)", self.builddir, os.getcwd()) try: - rmtree2(self.builddir) + remove_dir(self.builddir) base = os.path.dirname(self.builddir) # keep removing empty directories until we either find a non-empty one @@ -3000,6 +3043,37 @@ def run_all_steps(self, run_test_cases): print_msg("building and installing %s..." % self.full_mod_name, log=self.log, silent=self.silent) trace_msg("installation prefix: %s" % self.installdir) + + ignore_locks = build_option('ignore_locks') + + if ignore_locks: + self.log.info("Ignoring locks...") + else: + locks_dir = build_option('locks_dir') or os.path.join(install_path('software'), '.locks') + lock_path = os.path.join(locks_dir, '%s.lock' % self.installdir.replace('/', '_')) + + # if lock already exists, either abort or wait until it disappears + if os.path.exists(lock_path): + wait_on_lock = build_option('wait_on_lock') + if wait_on_lock: + while os.path.exists(lock_path): + print_msg("lock %s exists, waiting %d seconds..." 
% (lock_path, wait_on_lock), + silent=self.silent) + time.sleep(wait_on_lock) + else: + raise EasyBuildError("Lock %s already exists, aborting!", lock_path) + + # create lock to avoid that another installation running in parallel messes things up; + # we use a directory as a lock, since that's atomically created + try: + mkdir(lock_path, parents=True) + except EasyBuildError as err: + # clean up the error message a bit, get rid of the "Failed to create directory" part + quotes + stripped_err = str(err).split(':', 1)[1].strip().replace("'", '').replace('"', '') + raise EasyBuildError("Failed to create lock %s: %s", lock_path, stripped_err) + + self.log.info("Lock created: %s", lock_path) + try: for (step_name, descr, step_methods, skippable) in steps: if self._skip_step(step_name, skippable): @@ -3014,6 +3088,10 @@ def run_all_steps(self, run_test_cases): except StopException: pass + finally: + if not ignore_locks: + remove_dir(lock_path) + self.log.info("Lock removed: %s", lock_path) # return True for successfull build (or stopped build) return True @@ -3059,7 +3137,7 @@ def build_and_install_one(ecdict, init_env): # restore original environment, and then sanitize it _log.info("Resetting environment") - filetools.errors_found_in_log = 0 + run.errors_found_in_log = 0 restore_env(init_env) sanitize_env() @@ -3210,10 +3288,9 @@ def build_and_install_one(ecdict, init_env): print_msg("%s: Installation %s %s (took %s)" % (summary, ended, succ, req_time), log=_log, silent=silent) # check for errors - if filetools.errors_found_in_log > 0: - print_msg("WARNING: %d possible error(s) were detected in the " - "build logs, please verify the build." 
% filetools.errors_found_in_log, - _log, silent=silent) + if run.errors_found_in_log > 0: + _log.warning("%d possible error(s) were detected in the " + "build logs, please verify the build.", run.errors_found_in_log) if app.postmsg: print_msg("\nWARNING: %s\n" % app.postmsg, log=_log, silent=silent) @@ -3253,7 +3330,7 @@ def reproduce_build(app, reprod_dir_root): reprod_dir = find_backup_name_candidate(os.path.join(reprod_dir_root, REPROD)) reprod_spec = os.path.join(reprod_dir, ec_filename) try: - app.cfg.dump(reprod_spec) + app.cfg.dump(reprod_spec, explicit_toolchains=True) _log.info("Dumped easyconfig instance to %s", reprod_spec) except NotImplementedError as err: _log.warning("Unable to dump easyconfig instance to %s: %s", reprod_spec, err) diff --git a/easybuild/framework/easyconfig/easyconfig.py b/easybuild/framework/easyconfig/easyconfig.py index b3e8af1cb8..3faeb84b90 100644 --- a/easybuild/framework/easyconfig/easyconfig.py +++ b/easybuild/framework/easyconfig/easyconfig.py @@ -36,6 +36,7 @@ :author: Alan O'Cais (Juelich Supercomputing Centre) :author: Bart Oldeman (McGill University, Calcul Quebec, Compute Canada) :author: Maxime Boissonneault (Universite Laval, Calcul Quebec, Compute Canada) +:author: Victor Holanda (CSCS, ETH Zurich) """ import copy @@ -45,6 +46,7 @@ import re from distutils.version import LooseVersion +import easybuild.tools.filetools as filetools from easybuild.base import fancylogger from easybuild.framework.easyconfig import MANDATORY from easybuild.framework.easyconfig.constants import EXTERNAL_MODULE_MARKER @@ -58,10 +60,12 @@ from easybuild.framework.easyconfig.parser import EasyConfigParser, fetch_parameters_from_easyconfig from easybuild.framework.easyconfig.templates import TEMPLATE_CONSTANTS, template_constant_dict from easybuild.tools.build_log import EasyBuildError, print_warning, print_msg -from easybuild.tools.config import LOCAL_VAR_NAMING_CHECK_ERROR, LOCAL_VAR_NAMING_CHECK_LOG, LOCAL_VAR_NAMING_CHECK_WARN +from 
easybuild.tools.config import GENERIC_EASYBLOCK_PKG, LOCAL_VAR_NAMING_CHECK_ERROR, LOCAL_VAR_NAMING_CHECK_LOG +from easybuild.tools.config import LOCAL_VAR_NAMING_CHECK_WARN from easybuild.tools.config import Singleton, build_option, get_module_naming_scheme -from easybuild.tools.filetools import EASYBLOCK_CLASS_PREFIX, copy_file, decode_class_name, encode_class_name -from easybuild.tools.filetools import find_backup_name_candidate, find_easyconfigs, read_file, write_file +from easybuild.tools.filetools import convert_name, copy_file, create_index, decode_class_name, encode_class_name +from easybuild.tools.filetools import find_backup_name_candidate, find_easyconfigs, load_index +from easybuild.tools.filetools import read_file, write_file from easybuild.tools.hooks import PARSE, load_hooks, run_hook from easybuild.tools.module_naming_scheme.mns import DEVEL_MODULE_SUFFIX from easybuild.tools.module_naming_scheme.utilities import avail_module_naming_schemes, det_full_ec_version @@ -102,6 +106,7 @@ _easyconfig_files_cache = {} _easyconfigs_cache = {} +_path_indexes = {} def handle_deprecated_or_replaced_easyconfig_parameters(ec_method): @@ -553,21 +558,30 @@ def update(self, key, value, allow_duplicate=True): """ Update a string configuration value with a value (i.e. append to it). """ - prev_value = self[key] - if isinstance(prev_value, string_type): - if allow_duplicate or value not in prev_value: - self[key] = '%s %s ' % (prev_value, value) - elif isinstance(prev_value, list): - if allow_duplicate: - self[key] = prev_value + value - else: - for item in value: - # add only those items that aren't already in the list - if item not in prev_value: - self[key] = prev_value + [item] + if isinstance(value, string_type): + lval = [value] + elif isinstance(value, list): + lval = value + else: + msg = "Can't update configuration value for %s, because the " + msg += "attempted update value, '%s', is not a string or list." 
+ raise EasyBuildError(msg, key, value) + + param_value = self[key] + if isinstance(param_value, string_type): + for item in lval: + # re.search: only add value to string if it's not there yet (surrounded by whitespace) + if allow_duplicate or (not re.search(r'(^|\s+)%s(\s+|$)' % re.escape(item), param_value)): + param_value = param_value + ' %s ' % item + elif isinstance(param_value, list): + for item in lval: + if allow_duplicate or item not in param_value: + param_value = param_value + [item] else: raise EasyBuildError("Can't update configuration value for %s, because it's not a string or list.", key) + self[key] = param_value + def set_keys(self, params): """ Set keys in this EasyConfig instance based on supplied easyconfig parameter values. @@ -1087,7 +1101,7 @@ def all_dependencies(self): return self._all_dependencies - def dump(self, fp, always_overwrite=True, backup=False): + def dump(self, fp, always_overwrite=True, backup=False, explicit_toolchains=False): """ Dump this easyconfig to file, with the given filename. 
@@ -1116,8 +1130,18 @@ def dump(self, fp, always_overwrite=True, backup=False): if self.template_values[key] not in templ_val and len(self.template_values[key]) > 2: templ_val[self.template_values[key]] = key + toolchain_hierarchy = None + if not explicit_toolchains: + try: + toolchain_hierarchy = get_toolchain_hierarchy(self['toolchain']) + except EasyBuildError as err: + # don't fail hard just because we can't get the hierarchy + self.log.warning('Could not generate toolchain hierarchy for %s to use in easyconfig dump method, ' + 'error:\n%s', self['toolchain'], str(err)) + try: - ectxt = self.parser.dump(self, default_values, templ_const, templ_val) + ectxt = self.parser.dump(self, default_values, templ_const, templ_val, + toolchain_hierarchy=toolchain_hierarchy) except NotImplementedError as err: # need to restore enable_templating value in case this method is caught in a try/except block and ignored # (the ability to dump is not a hard requirement for build success) @@ -1152,20 +1176,133 @@ def _validate(self, attr, values): # private method if self[attr] and self[attr] not in values: raise EasyBuildError("%s provided '%s' is not valid: %s", attr, self[attr], values) - def handle_external_module_metadata(self, dep_name): + def probe_external_module_metadata(self, mod_name, existing_metadata=None): """ - helper function for _parse_dependency - handles metadata for external module dependencies + Helper function for handle_external_module_metadata. + + Tries to determine metadata for external module when there is not entry in the metadata file, + by looking at the variables defined by the module file. + + This is mainly intended for modules provided in the Cray Programming Environment, + but it could also be useful in other contexts. + + The following pairs of variables are considered (in order, first hit wins), + where 'XXX' is the software name in capitals: + 1. $CRAY_XXX_PREFIX and $CRAY_XXX_VERSION + 1. $CRAY_XXX_PREFIX_DIR and $CRAY_XXX_VERSION + 2. 
$CRAY_XXX_DIR and $CRAY_XXX_VERSION + 2. $CRAY_XXX_ROOT and $CRAY_XXX_VERSION + 5. $XXX_PREFIX and $XXX_VERSION + 4. $XXX_DIR and $XXX_VERSION + 5. $XXX_ROOT and $XXX_VERSION + 3. $XXX_HOME and $XXX_VERSION + + If none of the pairs is found, then an empty dictionary is returned. + + :param mod_name: name of the external module + :param metadata: already available metadata for this external module (if any) """ - dependency = {} - if dep_name in self.external_modules_metadata: - dependency['external_module_metadata'] = self.external_modules_metadata[dep_name] - self.log.info("Updated dependency info with available metadata for external module %s: %s", - dep_name, dependency['external_module_metadata']) + res = {} + + if existing_metadata is None: + existing_metadata = {} + + soft_name = existing_metadata.get('name') + if soft_name: + # software name is a list of names in metadata, just grab first one + soft_name = soft_name[0] else: - self.log.info("No metadata available for external module %s", dep_name) + # if the software name is not known yet, use the first part of the module name as software name, + # but strip off the leading 'cray-' part first (examples: cray-netcdf/4.6.1.3, cray-fftw/3.3.8.2) + soft_name = mod_name.split('/')[0] + + cray_prefix = 'cray-' + if soft_name.startswith(cray_prefix): + soft_name = soft_name[len(cray_prefix):] + + # determine software name to use in names of environment variables (upper case, '-' becomes '_') + soft_name_in_mod_name = convert_name(soft_name.replace('-', '_'), upper=True) + + var_name_pairs = [ + ('CRAY_%s_PREFIX', 'CRAY_%s_VERSION'), + ('CRAY_%s_PREFIX_DIR', 'CRAY_%s_VERSION'), + ('CRAY_%s_DIR', 'CRAY_%s_VERSION'), + ('CRAY_%s_ROOT', 'CRAY_%s_VERSION'), + ('%s_PREFIX', '%s_VERSION'), + ('%s_DIR', '%s_VERSION'), + ('%s_ROOT', '%s_VERSION'), + ('%s_HOME', '%s_VERSION'), + ] + + for prefix_var_name, version_var_name in var_name_pairs: + prefix_var_name = prefix_var_name % soft_name_in_mod_name + version_var_name = 
version_var_name % soft_name_in_mod_name + + prefix = self.modules_tool.get_setenv_value_from_modulefile(mod_name, prefix_var_name) + version = self.modules_tool.get_setenv_value_from_modulefile(mod_name, version_var_name) + + # we only have a hit when values for *both* variables are found + if prefix and version: + + if 'name' not in existing_metadata: + res['name'] = [soft_name] + + # if a version is already set in the available metadata, we retain it + if 'version' not in existing_metadata: + res['version'] = [version] + self.log.info('setting external module %s version to be %s', mod_name, version) + + # if a prefix is already set in the available metadata, we retain it + if 'prefix' not in existing_metadata: + res['prefix'] = prefix + self.log.info('setting external module %s prefix to be %s', mod_name, prefix_var_name) + break - return dependency + return res + + def handle_external_module_metadata(self, mod_name): + """ + Helper function for _parse_dependency; collects metadata for external module dependencies. 
+ + :param mod_name: name of external module to collect metadata for + """ + partial_mod_name = mod_name.split('/')[0] + + # check whether existing metadata for external modules already has metadata for this module; + # first using full module name (as it is provided), for example 'cray-netcdf/4.6.1.3', + # then with partial module name, for example 'cray-netcdf' + metadata = self.external_modules_metadata.get(mod_name, {}) + self.log.info("Available metadata for external module %s: %s", mod_name, metadata) + + partial_mod_name_metadata = self.external_modules_metadata.get(partial_mod_name, {}) + self.log.info("Available metadata for external module using partial module name %s: %s", + partial_mod_name, partial_mod_name_metadata) + + for key in partial_mod_name_metadata: + if key not in metadata: + metadata[key] = partial_mod_name_metadata[key] + + self.log.info("Combined available metadata for external module %s: %s", mod_name, metadata) + + # if not all metadata is available (name/version/prefix), probe external module to collect more metadata; + # first with full module name, and then with partial module name if first probe didn't return anything; + # note: result of probe_external_module_metadata only contains metadata for keys that were not set yet + if not all(key in metadata for key in ['name', 'prefix', 'version']): + self.log.info("Not all metadata found yet for external module %s, probing module...", mod_name) + probed_metadata = self.probe_external_module_metadata(mod_name, existing_metadata=metadata) + if probed_metadata: + self.log.info("Extra metadata found by probing external module %s: %s", mod_name, probed_metadata) + metadata.update(probed_metadata) + else: + self.log.info("No extra metadata found by probing %s, trying with partial module name...", mod_name) + probed_metadata = self.probe_external_module_metadata(partial_mod_name, existing_metadata=metadata) + self.log.info("Extra metadata for external module %s found by probing partial module 
name %s: %s", + mod_name, partial_mod_name, probed_metadata) + metadata.update(probed_metadata) + + self.log.info("Obtained metadata after module probing: %s", metadata) + + return {'external_module_metadata': metadata} def handle_multi_deps(self): """ @@ -1450,17 +1587,32 @@ def generate_template_values(self): def _generate_template_values(self, ignore=None): """Actual code to generate the template values""" - if self.template_values is None: - self.template_values = {} # step 0. self.template_values can/should be updated from outside easyconfig - # (eg the run_setp code in EasyBlock) + # (eg the run_step code in EasyBlock) # step 1-3 work with easyconfig.templates constants # disable templating with creating dict with template values to avoid looping back to here via __getitem__ prev_enable_templating = self.enable_templating + self.enable_templating = False - template_values = template_constant_dict(self, ignore=ignore) + + if self.template_values is None: + # if no template values are set yet, initiate with a minimal set of template values; + # this is important for easyconfig that use %(version_minor)s to define 'toolchain', + # which is a pretty weird use case, but fine... 
+ self.template_values = template_constant_dict(self, ignore=ignore) + + self.enable_templating = prev_enable_templating + + # grab toolchain instance with templating support enabled, + # which is important in case the Toolchain instance was not created yet + toolchain = self.toolchain + + # get updated set of template values, now with toolchain instance + # (which is used to define the %(mpi_cmd_prefix)s template) + self.enable_templating = False + template_values = template_constant_dict(self, ignore=ignore, toolchain=toolchain) self.enable_templating = prev_enable_templating # update the template_values dict @@ -1672,8 +1824,8 @@ def get_easyblock_class(easyblock, name=None, error_on_failed_import=True, error def is_generic_easyblock(easyblock): """Return whether specified easyblock name is a generic easyblock or not.""" - - return easyblock and not easyblock.startswith(EASYBLOCK_CLASS_PREFIX) + _log.deprecated("is_generic_easyblock function was moved to easybuild.tools.filetools", '5.0') + return filetools.is_generic_easyblock(easyblock) def get_module_path(name, generic=None, decode=True): @@ -1688,7 +1840,7 @@ def get_module_path(name, generic=None, decode=True): return None if generic is None: - generic = is_generic_easyblock(name) + generic = filetools.is_generic_easyblock(name) # example: 'EB_VSC_minus_tools' should result in 'vsc_tools' if decode: @@ -1697,7 +1849,7 @@ def get_module_path(name, generic=None, decode=True): modpath = ['easybuild', 'easyblocks'] if generic: - modpath.append('generic') + modpath.append(GENERIC_EASYBLOCK_PKG) return '.'.join(modpath + [module_name]) @@ -1890,10 +2042,29 @@ def robot_find_easyconfig(name, version): res = None for path in paths: + + if build_option('ignore_index'): + _log.info("Ignoring index for %s...", path) + path_index = [] + elif path in _path_indexes: + path_index = _path_indexes[path] + _log.info("Found loaded index for %s", path) + elif os.path.exists(path): + path_index = load_index(path) + if 
path_index is None: + _log.info("No index found for %s, so creating it...", path) + path_index = create_index(path) + else: + _log.info("Loaded index for %s", path) + + _path_indexes[path] = path_index + else: + path_index = [] + easyconfigs_paths = create_paths(path, name, version) for easyconfig_path in easyconfigs_paths: _log.debug("Checking easyconfig path %s" % easyconfig_path) - if os.path.isfile(easyconfig_path): + if easyconfig_path in path_index or os.path.isfile(easyconfig_path): _log.debug("Found easyconfig file for name %s, version %s at %s" % (name, version, easyconfig_path)) _easyconfig_files_cache[key] = os.path.abspath(easyconfig_path) res = _easyconfig_files_cache[key] diff --git a/easybuild/framework/easyconfig/format/format.py b/easybuild/framework/easyconfig/format/format.py index 0ac6c380f8..85b2f239df 100644 --- a/easybuild/framework/easyconfig/format/format.py +++ b/easybuild/framework/easyconfig/format/format.py @@ -632,8 +632,8 @@ def parse(self, txt, **kwargs): """Parse the txt according to this format. This is highly version specific""" raise NotImplementedError - def dump(self, ecfg, default_values, templ_const, templ_val): - """Dump easyconfig according to this format. This is higly version specific""" + def dump(self, ecfg, default_values, templ_const, templ_val, toolchain_hierarchy=None): + """Dump easyconfig according to this format. 
This is highly version specific""" raise NotImplementedError def extract_comments(self, rawtxt): diff --git a/easybuild/framework/easyconfig/format/one.py b/easybuild/framework/easyconfig/format/one.py index 8534497293..88d043bc61 100644 --- a/easybuild/framework/easyconfig/format/one.py +++ b/easybuild/framework/easyconfig/format/one.py @@ -65,15 +65,17 @@ _log = fancylogger.getLogger('easyconfig.format.one', fname=False) -def dump_dependency(dep, toolchain): +def dump_dependency(dep, toolchain, toolchain_hierarchy=None): """Dump parsed dependency in tuple format""" + if not toolchain_hierarchy: + toolchain_hierarchy = [toolchain] if dep['external_module']: res = "(%s, EXTERNAL_MODULE)" % quote_py_str(dep['full_mod_name']) else: - # mininal spec: (name, version) + # minimal spec: (name, version) tup = (dep['name'], dep['version']) - if dep['toolchain'] != toolchain: + if all(dep['toolchain'] != subtoolchain for subtoolchain in toolchain_hierarchy): if dep[SYSTEM_TOOLCHAIN_NAME]: tup += (dep['versionsuffix'], True) else: @@ -260,7 +262,7 @@ def _find_param_with_comments(self, key, val, templ_const, templ_val): return res - def _find_defined_params(self, ecfg, keyset, default_values, templ_const, templ_val): + def _find_defined_params(self, ecfg, keyset, default_values, templ_const, templ_val, toolchain_hierarchy=None): """ Determine parameters in the dumped easyconfig file which have a non-default value. 
""" @@ -279,12 +281,18 @@ def _find_defined_params(self, ecfg, keyset, default_values, templ_const, templ_ # the way that builddependencies are constructed with multi_deps # we just need to dump the first entry without the dependencies # that are listed in multi_deps - valstr = [dump_dependency(d, ecfg['toolchain']) for d in val[0] - if d['name'] not in ecfg['multi_deps']] + valstr = [ + dump_dependency(d, ecfg['toolchain'], toolchain_hierarchy=toolchain_hierarchy) + for d in val[0] if d['name'] not in ecfg['multi_deps'] + ] else: - valstr = [[dump_dependency(d, ecfg['toolchain']) for d in dep] for dep in val] + valstr = [ + [dump_dependency(d, ecfg['toolchain'], toolchain_hierarchy=toolchain_hierarchy) + for d in dep] for dep in val + ] else: - valstr = [dump_dependency(d, ecfg['toolchain']) for d in val] + valstr = [dump_dependency(d, ecfg['toolchain'], toolchain_hierarchy=toolchain_hierarchy) + for d in val] elif key == 'toolchain': valstr = "{'name': '%(name)s', 'version': '%(version)s'}" % ecfg[key] else: @@ -299,7 +307,7 @@ def _find_defined_params(self, ecfg, keyset, default_values, templ_const, templ_ return eclines, printed_keys - def dump(self, ecfg, default_values, templ_const, templ_val): + def dump(self, ecfg, default_values, templ_const, templ_val, toolchain_hierarchy=None): """ Dump easyconfig in format v1. 
@@ -307,12 +315,14 @@ def dump(self, ecfg, default_values, templ_const, templ_val): :param default_values: default values for easyconfig parameters :param templ_const: known template constants :param templ_val: known template values + :param toolchain_hierarchy: hierarchy of toolchains for easyconfig """ # include header comments first dump = self.comments['header'][:] # print easyconfig parameters ordered and in groups specified above - params, printed_keys = self._find_defined_params(ecfg, GROUPED_PARAMS, default_values, templ_const, templ_val) + params, printed_keys = self._find_defined_params(ecfg, GROUPED_PARAMS, default_values, templ_const, templ_val, + toolchain_hierarchy=toolchain_hierarchy) dump.extend(params) # print other easyconfig parameters at the end diff --git a/easybuild/framework/easyconfig/format/yeb.py b/easybuild/framework/easyconfig/format/yeb.py index 6215500f8e..4e59b4892b 100644 --- a/easybuild/framework/easyconfig/format/yeb.py +++ b/easybuild/framework/easyconfig/format/yeb.py @@ -126,7 +126,7 @@ def _inject_constants_dict(self, txt): return full_txt - def dump(self, ecfg, default_values, templ_const, templ_val): + def dump(self, ecfg, default_values, templ_const, templ_val, toolchain_hierarchy=None): """Dump parsed easyconfig in .yeb format""" raise NotImplementedError("Dumping of .yeb easyconfigs not supported yet") diff --git a/easybuild/framework/easyconfig/parser.py b/easybuild/framework/easyconfig/parser.py index ee21fcd558..bb432724c3 100644 --- a/easybuild/framework/easyconfig/parser.py +++ b/easybuild/framework/easyconfig/parser.py @@ -226,6 +226,7 @@ def get_config_dict(self, validate=True): return cfg - def dump(self, ecfg, default_values, templ_const, templ_val): + def dump(self, ecfg, default_values, templ_const, templ_val, toolchain_hierarchy=None): """Dump easyconfig in format it was parsed from.""" - return self._formatter.dump(ecfg, default_values, templ_const, templ_val) + return self._formatter.dump(ecfg, 
default_values, templ_const, templ_val, + toolchain_hierarchy=toolchain_hierarchy) diff --git a/easybuild/framework/easyconfig/templates.py b/easybuild/framework/easyconfig/templates.py index f024866fd7..9629759dae 100644 --- a/easybuild/framework/easyconfig/templates.py +++ b/easybuild/framework/easyconfig/templates.py @@ -77,6 +77,7 @@ # software names for which to define ver and shortver templates TEMPLATE_SOFTWARE_VERSIONS = [ # software name, prefix for *ver and *shortver + ('CUDA', 'cuda'), ('Java', 'java'), ('Perl', 'perl'), ('Python', 'py'), @@ -145,7 +146,7 @@ # versionmajor, versionminor, versionmajorminor (eg '.'.join(version.split('.')[:2])) ) -def template_constant_dict(config, ignore=None, skip_lower=None): +def template_constant_dict(config, ignore=None, skip_lower=None, toolchain=None): """Create a dict for templating the values in the easyconfigs. - config is a dict with the structure of EasyConfig._config """ @@ -222,12 +223,24 @@ def template_constant_dict(config, ignore=None, skip_lower=None): for dep in deps: if isinstance(dep, dict): dep_name, dep_version = dep['name'], dep['version'] + + # take into account dependencies marked as external modules, + # where name/version may have to be harvested from metadata available for that external module + if dep.get('external_module', False): + metadata = dep.get('external_module_metadata', {}) + if dep_name is None: + # name is a list in metadata, just take first value (if any) + dep_name = metadata.get('name', [None])[0] + if dep_version is None: + # version is a list in metadata, just take first value (if any) + dep_version = metadata.get('version', [None])[0] + elif isinstance(dep, (list, tuple)): dep_name, dep_version = dep[0], dep[1] else: raise EasyBuildError("Unexpected type for dependency: %s", dep) - if isinstance(dep_name, string_type) and dep_name.lower() == name.lower(): + if isinstance(dep_name, string_type) and dep_name.lower() == name.lower() and dep_version: dep_version = 
pick_dep_version(dep_version) template_values['%sver' % pref] = dep_version dep_version_parts = dep_version.split('.') @@ -257,6 +270,17 @@ def template_constant_dict(config, ignore=None, skip_lower=None): except Exception: _log.warning("Failed to get .lower() for name %s value %s (type %s)", name, value, type(value)) + # step 5. add additional conditional templates + if toolchain is not None and hasattr(toolchain, 'mpi_cmd_prefix'): + try: + # get prefix for commands to be run with mpi runtime using default number of ranks + mpi_cmd_prefix = toolchain.mpi_cmd_prefix() + if mpi_cmd_prefix is not None: + template_values['mpi_cmd_prefix'] = mpi_cmd_prefix + except EasyBuildError as err: + # don't fail just because we couldn't resolve this template + _log.warning("Failed to create mpi_cmd_prefix template, error was:\n%s", err) + return template_values diff --git a/easybuild/framework/easyconfig/tools.py b/easybuild/framework/easyconfig/tools.py index 7d717d6258..2a3260ae80 100644 --- a/easybuild/framework/easyconfig/tools.py +++ b/easybuild/framework/easyconfig/tools.py @@ -90,7 +90,7 @@ def skip_available(easyconfigs, modtool): """Skip building easyconfigs for existing modules.""" module_names = [ec['full_mod_name'] for ec in easyconfigs] - modules_exist = modtool.exist(module_names) + modules_exist = modtool.exist(module_names, maybe_partial=False) retained_easyconfigs = [] for ec, mod_name, mod_exists in zip(easyconfigs, module_names, modules_exist): if mod_exists: @@ -604,17 +604,21 @@ def dump_env_script(easyconfigs): def categorize_files_by_type(paths): """ - Splits list of filepaths into a 3 separate lists: easyconfigs, files to delete and patch files + Splits list of filepaths into a 4 separate lists: easyconfigs, files to delete, patch files and + files with extension .py """ res = { 'easyconfigs': [], 'files_to_delete': [], 'patch_files': [], + 'py_files': [], } for path in paths: if path.startswith(':'): res['files_to_delete'].append(path[1:]) + elif 
path.endswith('.py'): + res['py_files'].append(path) # file must exist in order to check whether it's a patch file elif os.path.isfile(path) and is_patch_file(path): res['patch_files'].append(path) diff --git a/easybuild/framework/easyconfig/tweak.py b/easybuild/framework/easyconfig/tweak.py index 37403b352c..e39dfae559 100644 --- a/easybuild/framework/easyconfig/tweak.py +++ b/easybuild/framework/easyconfig/tweak.py @@ -36,6 +36,7 @@ :author: Maxime Boissonneault (Universite Laval, Calcul Quebec, Compute Canada) """ import copy +import functools import glob import os import re @@ -47,13 +48,16 @@ from easybuild.framework.easyconfig.default import get_easyconfig_parameter_default from easybuild.framework.easyconfig.easyconfig import EasyConfig, create_paths, process_easyconfig from easybuild.framework.easyconfig.easyconfig import get_toolchain_hierarchy, ActiveMNS +from easybuild.framework.easyconfig.format.one import EB_FORMAT_EXTENSION from easybuild.framework.easyconfig.format.format import DEPENDENCY_PARAMETERS +from easybuild.framework.easyconfig.parser import fetch_parameters_from_easyconfig +from easybuild.framework.easyconfig.tools import alt_easyconfig_paths from easybuild.toolchains.gcccore import GCCcore from easybuild.tools.build_log import EasyBuildError, print_warning from easybuild.tools.config import build_option from easybuild.tools.filetools import read_file, write_file from easybuild.tools.module_naming_scheme.utilities import det_full_ec_version -from easybuild.tools.robot import resolve_dependencies, robot_find_easyconfig +from easybuild.tools.robot import resolve_dependencies, robot_find_easyconfig, search_easyconfigs from easybuild.tools.toolchain.toolchain import SYSTEM_TOOLCHAIN_NAME from easybuild.tools.toolchain.toolchain import TOOLCHAIN_CAPABILITIES from easybuild.tools.utilities import flatten, nub, quote_str @@ -79,6 +83,9 @@ def ec_filename_for(path): def tweak(easyconfigs, build_specs, modtool, targetdirs=None): """Tweak list of 
easyconfigs according to provided build specifications.""" + # keep track of originally listed easyconfigs (via their path) + listed_ec_paths = [ec['spec'] for ec in easyconfigs] + tweaked_ecs_path, tweaked_ecs_deps_path = None, None if targetdirs is not None: tweaked_ecs_path, tweaked_ecs_deps_path = targetdirs @@ -89,71 +96,74 @@ def tweak(easyconfigs, build_specs, modtool, targetdirs=None): toolchains) # Toolchain is unique, let's store it source_toolchain = easyconfigs[-1]['ec']['toolchain'] - modifying_toolchains = False + modifying_toolchains_or_deps = False target_toolchain = {} src_to_dst_tc_mapping = {} revert_to_regex = False - if 'toolchain_name' in build_specs or 'toolchain_version' in build_specs: - keys = build_specs.keys() + if 'update_deps' in build_specs: + _log.experimental("Found build spec 'update_deps': Attempting to update dependency versions.") - # Make sure there are no more build_specs, as combining --try-toolchain* with other options is currently not - # supported - if any(key not in ['toolchain_name', 'toolchain_version', 'toolchain'] for key in keys): - warning_msg = "Combining --try-toolchain* with other build options is not fully supported: using regex" - print_warning(warning_msg, silent=build_option('silent')) - revert_to_regex = True + if any(key in build_specs for key in ['toolchain', 'toolchain_name', 'toolchain_version', 'update_deps']): + if not build_option('map_toolchains'): + if 'update_deps' in build_specs: + raise EasyBuildError("Cannot use --try-update-deps without setting --map-toolchains") + else: + msg = "Mapping of (sub)toolchains (with --map-toolchains) disabled, so falling back to regex mode, " + msg += "disabling recursion and not changing (sub)toolchains for dependencies" + _log.info(msg) + revert_to_regex = True if not revert_to_regex: - # we're doing something with the toolchain, - # so build specifications should be applied to whole dependency graph; + # we're doing something that involves the toolchain 
hierarchy; # obtain full dependency graph for specified easyconfigs; # easyconfigs will be ordered 'top-to-bottom' (toolchains and dependencies appearing first) - modifying_toolchains = True - - if 'toolchain_name' in keys: - target_toolchain['name'] = build_specs['toolchain_name'] + _log.debug("Updating toolchain and/or dependencies requested...applying build specifications recursively " + "(where appropriate):\n%s", build_specs) + modifying_toolchains_or_deps = True + pruned_build_specs = copy.copy(build_specs) + + update_dependencies = pruned_build_specs.pop('update_deps', None) + if 'toolchain' in pruned_build_specs: + target_toolchain = pruned_build_specs.pop('toolchain') + pruned_build_specs.pop('toolchain_name', '') + pruned_build_specs.pop('toolchain_version', '') else: - target_toolchain['name'] = source_toolchain['name'] + target_toolchain['name'] = pruned_build_specs.pop('toolchain_name', source_toolchain['name']) + target_toolchain['version'] = pruned_build_specs.pop('toolchain_version', source_toolchain['version']) - if 'toolchain_version' in keys: - target_toolchain['version'] = build_specs['toolchain_version'] - else: - target_toolchain['version'] = source_toolchain['version'] - - if build_option('map_toolchains'): - try: - src_to_dst_tc_mapping = map_toolchain_hierarchies(source_toolchain, target_toolchain, modtool) - except EasyBuildError as err: - # make sure exception was raised by match_minimum_tc_specs because toolchain mapping didn't work - if "No possible mapping from source toolchain" in err.msg: - error_msg = err.msg + '\n' - error_msg += "Toolchain %s is not equivalent to toolchain %s in terms of capabilities. 
" - error_msg += "(If you know what you are doing, " - error_msg += "you can use --disable-map-toolchains to proceed anyway.)" - raise EasyBuildError(error_msg, target_toolchain['name'], source_toolchain['name']) - else: - # simply re-raise the exception if something else went wrong - raise err - else: - msg = "Mapping of (sub)toolchains disabled, so falling back to regex mode, " - msg += "disabling recursion and not changing (sub)toolchains for dependencies" - _log.info(msg) - revert_to_regex = True - modifying_toolchains = False + try: + src_to_dst_tc_mapping = map_toolchain_hierarchies(source_toolchain, target_toolchain, modtool) + except EasyBuildError as err: + # make sure exception was raised by match_minimum_tc_specs because toolchain mapping didn't work + if "No possible mapping from source toolchain" in err.msg: + error_msg = err.msg + '\n' + error_msg += "Toolchain %s is not equivalent to toolchain %s in terms of capabilities. " + error_msg += "(If you know what you are doing, " + error_msg += "you can use --disable-map-toolchains to proceed anyway.)" + raise EasyBuildError(error_msg, target_toolchain['name'], source_toolchain['name']) + else: + # simply re-raise the exception if something else went wrong + raise err - if not revert_to_regex: - _log.debug("Applying build specifications recursively (no software name/version found): %s", build_specs) orig_ecs = resolve_dependencies(easyconfigs, modtool, retain_all_deps=True) # Filter out the toolchain hierarchy (which would only appear if we are applying build_specs recursively) - # We can leave any dependencies they may have as they will only be used if required (or originally listed) - _log.debug("Filtering out toolchain hierarchy for %s", source_toolchain) + # Also filter any dependencies of the hierarchy (unless they were originally listed for tweaking) + _log.debug("Filtering out toolchain hierarchy and dependencies for %s", source_toolchain) + if source_toolchain['name'] != SYSTEM_TOOLCHAIN_NAME: + 
path = robot_find_easyconfig(source_toolchain['name'], source_toolchain['version']) + toolchain_ec = process_easyconfig(path) + toolchain_deps = resolve_dependencies(toolchain_ec, modtool, retain_all_deps=True) + toolchain_dep_paths = [dep['spec'] for dep in toolchain_deps] + # only retain toolchain dependencies that are not in original list of easyconfigs to tweak + toolchain_dep_paths = [td for td in toolchain_dep_paths if td not in listed_ec_paths] + else: + toolchain_dep_paths = [] i = 0 while i < len(orig_ecs): - tc_names = [tc['name'] for tc in get_toolchain_hierarchy(source_toolchain)] - if orig_ecs[i]['ec']['name'] in tc_names: + if orig_ecs[i]['spec'] in toolchain_dep_paths: # drop elements in toolchain hierarchy del orig_ecs[i] else: @@ -167,9 +177,6 @@ def tweak(easyconfigs, build_specs, modtool, targetdirs=None): orig_ecs = easyconfigs _log.debug("Software name/version found, so not applying build specifications recursively: %s" % build_specs) - # keep track of originally listed easyconfigs (via their path) - listed_ec_paths = [ec['spec'] for ec in easyconfigs] - # generate tweaked easyconfigs, and continue with those instead tweaked_easyconfigs = [] for orig_ec in orig_ecs: @@ -183,10 +190,12 @@ def tweak(easyconfigs, build_specs, modtool, targetdirs=None): new_ec_file = None verification_build_specs = copy.copy(build_specs) if orig_ec['spec'] in listed_ec_paths: - if modifying_toolchains: + if modifying_toolchains_or_deps: if tc_name in src_to_dst_tc_mapping: new_ec_file = map_easyconfig_to_target_tc_hierarchy(orig_ec['spec'], src_to_dst_tc_mapping, - tweaked_ecs_path) + targetdir=tweaked_ecs_path, + update_build_specs=pruned_build_specs, + update_dep_versions=update_dependencies) # Need to update the toolchain in the build_specs to match the toolchain mapping keys = verification_build_specs.keys() if 'toolchain_name' in keys: @@ -203,12 +212,14 @@ def tweak(easyconfigs, build_specs, modtool, targetdirs=None): tweaked_easyconfigs.extend(new_ecs) else: 
# Place all tweaked dependency easyconfigs in the directory appended to the robot path - if modifying_toolchains: + if modifying_toolchains_or_deps: if tc_name in src_to_dst_tc_mapping: - new_ec_file = map_easyconfig_to_target_tc_hierarchy(orig_ec['spec'], src_to_dst_tc_mapping, - targetdir=tweaked_ecs_deps_path) + # Note pruned_build_specs are not passed down for dependencies + map_easyconfig_to_target_tc_hierarchy(orig_ec['spec'], src_to_dst_tc_mapping, + targetdir=tweaked_ecs_deps_path, + update_dep_versions=update_dependencies) else: - new_ec_file = tweak_one(orig_ec['spec'], None, build_specs, targetdir=tweaked_ecs_deps_path) + tweak_one(orig_ec['spec'], None, build_specs, targetdir=tweaked_ecs_deps_path) return tweaked_easyconfigs @@ -821,19 +832,160 @@ def map_toolchain_hierarchies(source_toolchain, target_toolchain, modtool): return tc_mapping -def map_easyconfig_to_target_tc_hierarchy(ec_spec, toolchain_mapping, targetdir=None): +def map_versionsuffixes_cache(func): + """Function decorator to cache (and retrieve cached) versionsuffixes mapping between toolchains.""" + cache = {} + + @functools.wraps(func) + def cache_aware_func(software_name, original_toolchain, toolchain_mapping): + """Look up original_toolchain in cache first, determine and cache it if not available yet.""" + # No need for toolchain_mapping to change to be part of the key, it is unique in this context + cache_key = (software_name, original_toolchain['name'], original_toolchain['version']) + + # fetch from cache if available, cache it if it's not + if cache_key in cache: + _log.debug("Using cache to return version suffix mapping for toolchain %s: %s", str(cache_key), + cache[cache_key]) + else: + versionsuffix_mappings = func(software_name, original_toolchain, toolchain_mapping) + cache[cache_key] = versionsuffix_mappings + return cache[cache_key] + + # Expose clear method of cache to wrapped function + cache_aware_func.clear = cache.clear + + return cache_aware_func + + 
+@map_versionsuffixes_cache +def map_common_versionsuffixes(software_name, original_toolchain, toolchain_mapping): + """ + Create a mapping of common versionssuffixes (like `-Python-%(pyver)s`) between toolchains + + :param software_name: Name of software + :param original_toolchain: original toolchain + :param toolchain_mapping: toolchain mapping from that containing original to target + :return: dictionary of possible mappings + """ + orig_toolchain_hierarchy = get_toolchain_hierarchy(original_toolchain) + + versionsuffix_mappings = {} + + # Find all versions in the original toolchain hierarchy and register what they would be mapped to + for toolchain in orig_toolchain_hierarchy: + prefix_stub = '%s-' % software_name + cand_paths, toolchain_suffix = get_matching_easyconfig_candidates(prefix_stub, toolchain) + for path in cand_paths: + + version, versionsuffix = fetch_parameters_from_easyconfig(read_file(path), ['version', 'versionsuffix']) + + if version is None: + raise EasyBuildError("Failed to extract 'version' value from %s", path) + else: + major_version = version.split('.')[0] + try: + # make sure we have a have an integer value for the major version + int(major_version) + except ValueError: + _log.warning("Cannot extract major version for %s from %s", prefix_stub, version) + + # Use these values to construct a dependency + software_as_dep = { + 'name': software_name, + 'toolchain': toolchain, + 'version': version, + 'versionsuffix': versionsuffix or '', + } + # See what this dep would be mapped to + version_matches = find_potential_version_mappings(software_as_dep, toolchain_mapping) + if version_matches: + target_version = version_matches[0]['version'] + if LooseVersion(target_version) > LooseVersion(version): + original_suffix = '-%s-%s' % (software_name, version) + mapped_suffix = '-%s-%s' % (software_name, target_version) + # Make sure mapping is unique + if original_suffix in versionsuffix_mappings: + if mapped_suffix != 
versionsuffix_mappings[original_suffix]: + raise EasyBuildError("No unique versionsuffix mapping for %s in %s toolchain " + "hierarchy to %s toolchain hierarchy", original_suffix, + original_toolchain, toolchain_mapping[original_toolchain['name']]) + else: + versionsuffix_mappings[original_suffix] = mapped_suffix + + _log.info("Identified version suffix mappings: %s", versionsuffix_mappings) + return versionsuffix_mappings + + +def get_matching_easyconfig_candidates(prefix_stub, toolchain): + """ + Find easyconfigs that match specified requirements w.r.t. toolchain and partial filename pattern. + + :param prefix_stub: stub used in regex (e.g., 'Python-' or 'Python-2') + :param toolchain: the toolchain to use with the search + :return: list of candidate paths, toolchain_suffix of candidates + """ + if toolchain['name'] == SYSTEM_TOOLCHAIN_NAME: + toolchain_suffix = EB_FORMAT_EXTENSION + else: + toolchain_suffix = '-%s-%s' % (toolchain['name'], toolchain['version']) + regex_search_query = '^%s.*' % prefix_stub + toolchain_suffix + cand_paths = search_easyconfigs(regex_search_query, consider_extra_paths=False, print_result=False, + case_sensitive=True) + return cand_paths, toolchain_suffix + + +def map_easyconfig_to_target_tc_hierarchy(ec_spec, toolchain_mapping, targetdir=None, update_build_specs=None, + update_dep_versions=False): """ Take an easyconfig spec, parse it, map it to a target toolchain and dump it out - :param ec_spec: Location of original easyconfig file - :param toolchain_mapping: Mapping between source toolchain and target toolchain - :param targetdir: Directory to dump the modified easyconfig file in + :param ec_spec: location of original easyconfig file + :param toolchain_mapping: mapping between source toolchain and target toolchain + :param targetdir: directory to dump the modified easyconfig file in + :param update_build_specs: dict with names and values of easyconfig parameters to tweak + :param update_dep_versions: boolean indicating whether 
dependency versions should be updated :return: Location of the modified easyconfig file """ # Fully parse the original easyconfig parsed_ec = process_easyconfig(ec_spec, validate=False)[0]['ec'] + versonsuffix_mapping = {} + + if update_dep_versions: + # We may need to update the versionsuffix if it is like, for example, `-Python-2.7.8` + versonsuffix_mapping = map_common_versionsuffixes('Python', parsed_ec['toolchain'], toolchain_mapping) + + if update_build_specs is not None: + if 'version' in update_build_specs: + + # take into account that version in exts_list may have to be updated as well + if 'exts_list' in parsed_ec and parsed_ec['exts_list']: + _log.warning("Found 'exts_list' in %s, will only update extension version of %s (if applicable)", + ec_spec, parsed_ec['name']) + for idx, extension in enumerate(parsed_ec['exts_list']): + if isinstance(extension, tuple) and extension[0] == parsed_ec['name']: + ext_as_list = list(extension) + # in the extension tuple the version is the second element + if len(ext_as_list) > 1 and ext_as_list[1] == parsed_ec['version']: + ext_as_list[1] = update_build_specs['version'] + # also need to clear the checksum (if it exists) + if len(ext_as_list) > 2: + ext_as_list[2].pop('checksums', None) + # now replace the tuple in the dict of parameters + # to update the original dep dict, we need to get a reference with templating disabled... 
+ parsed_ec.get_ref('exts_list')[idx] = tuple(ext_as_list) + _log.info("Updated extension found in %s with new version", ec_spec) + + # automagically clear out list of checksums if software version is being tweaked + if 'checksums' not in update_build_specs: + update_build_specs['checksums'] = [] + _log.warning("Tweaking version: checksums cleared, verification disabled.") + + # update the keys according to the build specs + for key in update_build_specs: + parsed_ec[key] = update_build_specs[key] + # Replace the toolchain if the mapping exists tc_name = parsed_ec['toolchain']['name'] if tc_name in toolchain_mapping: @@ -844,6 +996,7 @@ def map_easyconfig_to_target_tc_hierarchy(ec_spec, toolchain_mapping, targetdir= # Replace the toolchains of all the dependencies for key in DEPENDENCY_PARAMETERS: # loop over a *copy* of dependency dicts (with resolved templates); + # to update the original dep dict, we need to get a reference with templating disabled... val = parsed_ec[key] orig_val = parsed_ec.get_ref(key) @@ -853,25 +1006,142 @@ def map_easyconfig_to_target_tc_hierarchy(ec_spec, toolchain_mapping, targetdir= orig_val = flatten(orig_val) for idx, dep in enumerate(val): + # reference to original dep dict, this is the one we should be updating orig_dep = orig_val[idx] + # skip dependencies that are marked as external modules if dep['external_module']: continue dep_tc_name = dep['toolchain']['name'] if dep_tc_name in toolchain_mapping: orig_dep['toolchain'] = toolchain_mapping[dep_tc_name] - # Replace the binutils version (if necessary) + + dep_changed = False + + # replace the binutils version (if necessary) if 'binutils' in toolchain_mapping and (dep['name'] == 'binutils' and dep_tc_name == GCCcore.NAME): orig_dep.update(toolchain_mapping['binutils']) - # set module names - orig_dep['short_mod_name'] = ActiveMNS().det_short_module_name(dep) - orig_dep['full_mod_name'] = ActiveMNS().det_full_module_name(dep) - # Determine the name of the modified easyconfig and 
dump it to target_dir + dep_changed = True + + elif update_dep_versions: + # search for available updates for this dependency: + # first get highest version candidate paths for this (include search through subtoolchains) + potential_version_mappings = find_potential_version_mappings(dep, toolchain_mapping, + versionsuffix_mapping=versonsuffix_mapping) + # only highest version match is retained by default in potential_version_mappings, + # compare that version to the original version and replace if appropriate (upgrades only). + if potential_version_mappings: + highest_version_match = potential_version_mappings[0]['version'] + if LooseVersion(highest_version_match) > LooseVersion(dep['version']): + _log.info("Updating version of %s dependency from %s to %s", dep['name'], dep['version'], + highest_version_match) + _log.info("Depending on your configuration, this will be resolved with one of the following " + "easyconfigs: \n%s", '\n'.join(cand['path'] for cand in potential_version_mappings)) + orig_dep['version'] = highest_version_match + if orig_dep['versionsuffix'] in versonsuffix_mapping: + dep['versionsuffix'] = versonsuffix_mapping[orig_dep['versionsuffix']] + orig_dep['versionsuffix'] = versonsuffix_mapping[orig_dep['versionsuffix']] + dep_changed = True + + if dep_changed: + _log.debug("Modified dependency %s of %s", dep['name'], ec_spec) + + # determine the name of the modified easyconfig and dump it to target_dir + if parsed_ec['versionsuffix'] in versonsuffix_mapping: + parsed_ec['versionsuffix'] = versonsuffix_mapping[parsed_ec['versionsuffix']] ec_filename = '%s-%s.eb' % (parsed_ec['name'], det_full_ec_version(parsed_ec)) tweaked_spec = os.path.join(targetdir or tempfile.gettempdir(), ec_filename) parsed_ec.dump(tweaked_spec, always_overwrite=False, backup=True) - _log.debug("Dumped easyconfig tweaked via --try-toolchain* to %s", tweaked_spec) + _log.debug("Dumped easyconfig tweaked via --try-* to %s", tweaked_spec) return tweaked_spec + + +def 
find_potential_version_mappings(dep, toolchain_mapping, versionsuffix_mapping=None, highest_versions_only=True): + """ + Find potential version mapping for a dependency in a new hierarchy + + :param dep: dependency specification (dict) + :param toolchain_mapping: toolchain mapping used for search + :param versionsuffix_mapping: mapping of version suffixes + (required by software with a special version suffix, such as Python packages) + :param highest_versions_only: only return highest versions + :return: list of dependencies that match + """ + if versionsuffix_mapping is None: + versionsuffix_mapping = {} + + # Find the target toolchain and create the hierarchy to search within + dep_tc_name = dep['toolchain']['name'] + if dep_tc_name in toolchain_mapping: + search_toolchain = toolchain_mapping[dep_tc_name] + else: + # dummy + search_toolchain = dep['toolchain'] + + toolchain_hierarchy = get_toolchain_hierarchy(search_toolchain) + + # Figure out what precedes the version (i.e. name + versionprefix (if any)) + versionprefix = dep.get('versionprefix', '') + prefix_to_version = dep['name'] + '-' + versionprefix + + # Figure out the main versionsuffix (altered depending on toolchain in the loop below) + versionsuffix = dep.get('versionsuffix', '') + # If versionsuffix is in our mapping then we expect it to be updated + if versionsuffix in versionsuffix_mapping: + versionsuffix = versionsuffix_mapping[versionsuffix] + + # the candidate version is a regex string, let's be conservative and search for patch upgrade first; + # if that doesn't work look for a minor version upgrade and if that fails will we try a global search, + # i.e, a major version upgrade (assumes major.minor.xxx versioning) + candidate_ver_list = [] + version_components = dep['version'].split('.') + major_version = version_components[0] + if len(version_components) > 2: # Have something like major.minor.xxx + minor_version = version_components[1] + candidate_ver_list.append(r'%s\.%s\..*' % 
(major_version, minor_version)) + if len(version_components) > 1: # Have at least major.minor + candidate_ver_list.append(r'%s\..*' % major_version) + candidate_ver_list.append(r'.*') # Include a major version search + + potential_version_mappings, highest_version = [], None + + for candidate_ver in candidate_ver_list: + + # if any potential version mappings were found already at this point, we don't add more + if not potential_version_mappings: + + for toolchain in toolchain_hierarchy: + + # determine search pattern based on toolchain, version prefix/suffix & version regex + if toolchain['name'] == SYSTEM_TOOLCHAIN_NAME: + toolchain_suffix = '' + else: + toolchain_suffix = '-%s-%s' % (toolchain['name'], toolchain['version']) + full_versionsuffix = toolchain_suffix + versionsuffix + EB_FORMAT_EXTENSION + depver = '^' + prefix_to_version + candidate_ver + full_versionsuffix + cand_paths = search_easyconfigs(depver, consider_extra_paths=False, print_result=False, + case_sensitive=True) + + # filter out easyconfigs that have been tweaked in this instance, they are not relevant here + tweaked_ecs_paths, _ = alt_easyconfig_paths(tempfile.gettempdir(), tweaked_ecs=True) + cand_paths = [path for path in cand_paths if not path.startswith(tweaked_ecs_paths)] + + # add what is left to the possibilities + for path in cand_paths: + version = fetch_parameters_from_easyconfig(read_file(path), ['version'])[0] + if version: + if highest_version is None or LooseVersion(version) > LooseVersion(highest_version): + highest_version = version + else: + raise EasyBuildError("Failed to determine version from contents of %s", path) + + potential_version_mappings.append({'path': path, 'toolchain': toolchain, 'version': version}) + + if highest_versions_only and highest_version is not None: + potential_version_mappings = [d for d in potential_version_mappings if d['version'] == highest_version] + + _log.debug("Found potential version mappings for %s: %s", dep, potential_version_mappings) + 
return potential_version_mappings diff --git a/easybuild/framework/easyconfig/types.py b/easybuild/framework/easyconfig/types.py index fd8f2e09a7..17b199cdc2 100644 --- a/easybuild/framework/easyconfig/types.py +++ b/easybuild/framework/easyconfig/types.py @@ -446,9 +446,11 @@ def to_checksums(checksums): res = [] for checksum in checksums: # each list entry can be: - # * a string (MD5 checksum) + # * None (indicates no checksum) + # * a string (MD5 or SHA256 checksum) # * a tuple with 2 elements: checksum type + checksum value # * a list of checksums (i.e. multiple checksums for a single file) + # * a dict (filename to checksum mapping) if isinstance(checksum, string_type): res.append(checksum) elif isinstance(checksum, (list, tuple)): @@ -462,6 +464,8 @@ def to_checksums(checksums): for key, value in checksum.items(): validated_dict[key] = to_checksums(value) res.append(validated_dict) + else: + res.append(checksum) return res diff --git a/easybuild/framework/extension.py b/easybuild/framework/extension.py index a27f81dd47..b44d5759fe 100644 --- a/easybuild/framework/extension.py +++ b/easybuild/framework/extension.py @@ -37,7 +37,7 @@ import os from easybuild.framework.easyconfig.easyconfig import resolve_template -from easybuild.framework.easyconfig.templates import template_constant_dict +from easybuild.framework.easyconfig.templates import TEMPLATE_NAMES_EASYBLOCK_RUN_STEP, template_constant_dict from easybuild.tools.build_log import EasyBuildError, raise_nosupport from easybuild.tools.filetools import change_dir from easybuild.tools.run import run_cmd @@ -111,6 +111,10 @@ def __init__(self, mself, ext, extra_params=None): # construct dict with template values that can be used self.cfg.template_values.update(template_constant_dict({'name': name, 'version': version})) + # Add install/builddir templates with values from master. 
+ for name in TEMPLATE_NAMES_EASYBLOCK_RUN_STEP: + self.cfg.template_values[name[0]] = str(getattr(self.master, name[0], None)) + # list of source/patch files: we use an empty list as default value like in EasyBlock self.src = resolve_template(self.ext.get('src', []), self.cfg.template_values) self.patches = resolve_template(self.ext.get('patches', []), self.cfg.template_values) diff --git a/easybuild/main.py b/easybuild/main.py index 69c47a7293..415321dc9a 100644 --- a/easybuild/main.py +++ b/easybuild/main.py @@ -56,7 +56,8 @@ from easybuild.tools.config import find_last_log, get_repository, get_repositorypath, build_option from easybuild.tools.containers.common import containerize from easybuild.tools.docs import list_software -from easybuild.tools.filetools import adjust_permissions, cleanup, copy_file, copy_files, read_file, write_file +from easybuild.tools.filetools import adjust_permissions, cleanup, copy_file, copy_files, dump_index, load_index +from easybuild.tools.filetools import read_file, write_file from easybuild.tools.github import check_github, close_pr, new_branch_github, find_easybuild_easyconfig from easybuild.tools.github import install_github_token, list_prs, new_pr, new_pr_from_branch, merge_pr from easybuild.tools.github import sync_branch_with_develop, sync_pr_with_develop, update_branch, update_pr @@ -255,9 +256,16 @@ def main(args=None, logfile=None, do_build=None, testing=False, modtool=None): elif options.list_software: print(list_software(output_format=options.output_format, detailed=options.list_software == 'detailed')) + elif options.create_index: + print_msg("Creating index for %s..." 
% options.create_index, prefix=False) + index_fp = dump_index(options.create_index, max_age_sec=options.index_max_age) + index = load_index(options.create_index) + print_msg("Index created at %s (%d files)" % (index_fp, len(index)), prefix=False) + # non-verbose cleanup after handling GitHub integration stuff or printing terse info early_stop_options = [ options.check_github, + options.create_index, options.install_github_token, options.list_installed_software, options.list_software, @@ -291,8 +299,12 @@ def main(args=None, logfile=None, do_build=None, testing=False, modtool=None): eb_file = find_easybuild_easyconfig() orig_paths.append(eb_file) - # last path is target when --copy-ec is used, so remove that from the list - target_path = orig_paths.pop() if options.copy_ec else None + if len(orig_paths) == 1: + # if only one easyconfig file is specified, use current directory as target directory + target_path = os.getcwd() + elif orig_paths: + # last path is target when --copy-ec is used, so remove that from the list + target_path = orig_paths.pop() if options.copy_ec else None categorized_paths = categorize_files_by_type(orig_paths) @@ -310,8 +322,12 @@ def main(args=None, logfile=None, do_build=None, testing=False, modtool=None): if options.copy_ec: if len(determined_paths) == 1: copy_file(determined_paths[0], target_path) - else: + print_msg("%s copied to %s" % (os.path.basename(determined_paths[0]), target_path), prefix=False) + elif len(determined_paths) > 1: copy_files(determined_paths, target_path) + print_msg("%d file(s) copied to %s" % (len(determined_paths), target_path), prefix=False) + else: + raise EasyBuildError("One or more files to copy should be specified!") elif options.fix_deprecated_easyconfigs: fix_deprecated_easyconfigs(determined_paths) diff --git a/easybuild/scripts/bootstrap_eb.py b/easybuild/scripts/bootstrap_eb.py index cdb0afee25..2e8c487a2a 100644 --- a/easybuild/scripts/bootstrap_eb.py +++ b/easybuild/scripts/bootstrap_eb.py @@ -40,6 
+40,7 @@ (via http://dubroy.com/blog/so-you-want-to-install-a-python-package/) """ +import codecs import copy import glob import os @@ -49,12 +50,19 @@ import sys import tempfile import traceback -import urllib2 from distutils.version import LooseVersion from hashlib import md5 +from platform import python_version +IS_PY3 = sys.version_info[0] == 3 -EB_BOOTSTRAP_VERSION = '20190922.01' +if not IS_PY3: + import urllib2 as std_urllib +else: + import urllib.request as std_urllib + + +EB_BOOTSTRAP_VERSION = '20200203.01' # argparse preferred, optparse deprecated >=2.7 HAVE_ARGPARSE = False @@ -68,7 +76,9 @@ VSC_BASE = 'vsc-base' VSC_INSTALL = 'vsc-install' -EASYBUILD_PACKAGES = [VSC_INSTALL, VSC_BASE, 'easybuild-framework', 'easybuild-easyblocks', 'easybuild-easyconfigs'] +# Python 3 is not supported by the vsc-* packages +EASYBUILD_PACKAGES = (([] if IS_PY3 else [VSC_INSTALL, VSC_BASE]) + + ['easybuild-framework', 'easybuild-easyblocks', 'easybuild-easyconfigs']) STAGE1_SUBDIR = 'eb_stage1' @@ -127,8 +137,10 @@ def error(msg, exit=True): def mock_stdout_stderr(): """Mock stdout/stderr channels""" - # cStringIO is only available in Python 2 - from cStringIO import StringIO + try: + from cStringIO import StringIO + except ImportError: + from io import StringIO orig_stdout, orig_stderr = sys.stdout, sys.stderr sys.stdout.flush() sys.stdout = StringIO() @@ -324,7 +336,7 @@ def check_setuptools(): # check setuptools version try: - os.system(cmd_tmpl % "import setuptools; print setuptools.__version__") + os.system(cmd_tmpl % "import setuptools; print(setuptools.__version__)") setuptools_ver = LooseVersion(open(outfile).read().strip()) debug("Found setuptools version %s" % setuptools_ver) @@ -336,7 +348,7 @@ debug("Failed to check setuptools version: %s" % err) res = False - os.system(cmd_tmpl % "from setuptools.command import easy_install; print easy_install.__file__") + os.system(cmd_tmpl % "from setuptools.command import easy_install; 
print(easy_install.__file__)") out = open(outfile).read().strip() debug("Location of setuptools' easy_install module: %s" % out) if 'setuptools/command/easy_install' not in out: @@ -344,7 +356,7 @@ def check_setuptools(): res = False if res is None: - os.system(cmd_tmpl % "import setuptools; print setuptools.__file__") + os.system(cmd_tmpl % "import setuptools; print(setuptools.__file__)") setuptools_loc = open(outfile).read().strip() res = os.path.dirname(os.path.dirname(setuptools_loc)) debug("Location of setuptools installation: %s" % res) @@ -523,27 +535,32 @@ def stage1(tmpdir, sourcepath, distribute_egg_dir, forcedversion): # install meta-package easybuild from PyPI if forcedversion: cmd.append('easybuild==%s' % forcedversion) + elif IS_PY3: + cmd.append('easybuild>=4.0') # Python 3 support added in EasyBuild 4 else: cmd.append('easybuild') - # install vsc-base again at the end, to avoid that the one available on the system is used instead - post_vsc_base = cmd[:] - post_vsc_base[-1] = VSC_BASE + '<2.9.0' + if not IS_PY3: + # install vsc-base again at the end, to avoid that the one available on the system is used instead + post_vsc_base = cmd[:] + post_vsc_base[-1] = VSC_BASE + '<2.9.0' if not print_debug: cmd.insert(0, '--quiet') - # install vsc-install version prior to 0.11.4, where mock was introduced as a dependency - # workaround for problem reported in https://github.com/easybuilders/easybuild-framework/issues/2712 - # also stick to vsc-base < 2.9.0 to avoid requiring 'future' Python package as dependency - for pkg in [VSC_INSTALL + '<0.11.4', VSC_BASE + '<2.9.0']: - precmd = cmd[:-1] + [pkg] - info("running pre-install command 'easy_install %s'" % (' '.join(precmd))) - run_easy_install(precmd) + # There is no support for Python3 in the older vsc-* packages and EasyBuild 4 includes working versions of vsc-* + if not IS_PY3: + # install vsc-install version prior to 0.11.4, where mock was introduced as a dependency + # workaround for problem reported in 
https://github.com/easybuilders/easybuild-framework/issues/2712 + # also stick to vsc-base < 2.9.0 to avoid requiring 'future' Python package as dependency + for pkg in [VSC_INSTALL + '<0.11.4', VSC_BASE + '<2.9.0']: + precmd = cmd[:-1] + [pkg] + info("running pre-install command 'easy_install %s'" % (' '.join(precmd))) + run_easy_install(precmd) info("installing EasyBuild with 'easy_install %s'\n" % (' '.join(cmd))) syntax_error_note = '\n'.join([ - "Note: a 'SyntaxError' may be reported for the easybuild/tools/py2vs3/py3.py module.", + "Note: a 'SyntaxError' may be reported for the easybuild/tools/py2vs3/py%s.py module." % ('3', '2')[IS_PY3], "You can safely ignore this message, it will not affect the functionality of the EasyBuild installation.", '', ]) @@ -632,8 +649,13 @@ def stage1(tmpdir, sourcepath, distribute_egg_dir, forcedversion): # make sure we're getting the expected EasyBuild packages import easybuild.framework import easybuild.easyblocks - import vsc.utils.fancylogger - for pkg in [easybuild.framework, easybuild.easyblocks, vsc.utils.fancylogger]: + pkgs_to_check = [easybuild.framework, easybuild.easyblocks] + # vsc is part of EasyBuild 4 + if LooseVersion(eb_version) < LooseVersion('4'): + import vsc.utils.fancylogger + pkgs_to_check.append(vsc.utils.fancylogger) + + for pkg in pkgs_to_check: if tmpdir not in pkg.__file__: error("Found another %s than expected: %s" % (pkg.__name__, pkg.__file__)) else: @@ -698,8 +720,8 @@ def stage2(tmpdir, templates, install_path, distribute_egg_dir, sourcepath): # determine download URL via PyPI's 'simple' API pkg_simple = None try: - pkg_simple = urllib2.urlopen('https://pypi.python.org/simple/%s' % pkg, timeout=10).read() - except (urllib2.URLError, urllib2.HTTPError) as err: + pkg_simple = std_urllib.urlopen('https://pypi.python.org/simple/%s' % pkg, timeout=10).read() + except (std_urllib.URLError, std_urllib.HTTPError) as err: # failing to figure out the package download URl may be OK when source tarballs 
are provided if sourcepath: info("Ignoring failed attempt to determine '%s' download URL since source tarballs are provided" % pkg) @@ -707,6 +729,8 @@ def stage2(tmpdir, templates, install_path, distribute_egg_dir, sourcepath): raise err if pkg_simple: + if IS_PY3: + pkg_simple = pkg_simple.decode('utf-8') pkg_url_part_regex = re.compile('/(packages/[^#]+)/%s#' % pkg_filename) res = pkg_url_part_regex.search(pkg_simple) if res: @@ -827,6 +851,8 @@ def main(): """Main script: bootstrap EasyBuild in stages.""" self_txt = open(__file__).read() + if IS_PY3: + self_txt = self_txt.encode('utf-8') info("EasyBuild bootstrap script (version %s, MD5: %s)" % (EB_BOOTSTRAP_VERSION, md5(self_txt).hexdigest())) info("Found Python %s\n" % '; '.join(sys.version.split('\n'))) @@ -866,6 +892,9 @@ def main(): forcedversion = EASYBUILD_BOOTSTRAP_FORCE_VERSION if forcedversion: info("Forcing specified version %s..." % forcedversion) + if IS_PY3 and LooseVersion(forcedversion) < LooseVersion('4'): + error('Python 3 support is only available with EasyBuild 4.x but you are trying to install EasyBuild %s' + % forcedversion) # create temporary dir for temporary installations tmpdir = tempfile.mkdtemp() @@ -982,10 +1011,12 @@ def main(): """ # check Python version -if sys.version_info[0] != 2 or sys.version_info[1] < 6: - pyver = sys.version.split(' ')[0] - sys.stderr.write("ERROR: Incompatible Python version: %s (should be Python 2 >= 2.6)\n" % pyver) - sys.stderr.write("Please try again using 'python2 %s '\n" % os.path.basename(__file__)) +loose_pyver = LooseVersion(python_version()) +min_pyver2 = LooseVersion('2.6') +min_pyver3 = LooseVersion('3.5') +if loose_pyver < min_pyver2 or (loose_pyver >= LooseVersion('3') and loose_pyver < min_pyver3): + sys.stderr.write("ERROR: Incompatible Python version: %s (should be Python 2 >= %s or Python 3 >= %s)\n" + % (python_version(), min_pyver2, min_pyver3)) sys.exit(1) # distribute_setup.py script (https://pypi.python.org/pypi/distribute) @@ 
-1117,8 +1148,10 @@ def main(): T4E5Gl7wpTxDXdQtzS1Hv52qHSilmOtEVO3IVjCdl5cgC5VC9T6CY1N4U4B0E1tltaqRtuYc/PyB i9tGe6+O/V0LCkGXvNkrKK2++u9qLFyTkO2sp7xSt/Bfil9os3SeOlY5fvv9mLcFj5zSNUqsRZfU 7lwukTHLpfpLDH2GT+yCCf8D2cp1xw== - -""".decode("base64").decode("zlib") +""" +if IS_PY3: + DISTRIBUTE_SETUP_PY = DISTRIBUTE_SETUP_PY.encode('ascii') +DISTRIBUTE_SETUP_PY = codecs.decode(codecs.decode(DISTRIBUTE_SETUP_PY, "base64"), "zlib") # run main function as body of script main() diff --git a/easybuild/scripts/install-EasyBuild-develop.sh b/easybuild/scripts/install-EasyBuild-develop.sh index b5ea2eb3d1..4181d8c42a 100755 --- a/easybuild/scripts/install-EasyBuild-develop.sh +++ b/easybuild/scripts/install-EasyBuild-develop.sh @@ -28,20 +28,11 @@ github_clone_branch() echo "=== Cloning ${GITHUB_USERNAME}/${REPO} ..." git clone --branch "${BRANCH}" "git@github.com:${GITHUB_USERNAME}/${REPO}.git" - if [[ "$REPO" == "vsc"* ]] - then - echo "=== Adding and fetching HPC-UGent GitHub repository @ hpcugent/${REPO} ..." - cd "${REPO}" - git remote add "github_hpcugent" "git@github.com:hpcugent/${REPO}.git" - git fetch github_hpcugent - git branch --set-upstream-to "github_hpcugent/${BRANCH}" "${BRANCH}" - else - echo "=== Adding and fetching EasyBuilders GitHub repository @ easybuilders/${REPO} ..." - cd "${REPO}" - git remote add "github_easybuilders" "git@github.com:easybuilders/${REPO}.git" - git fetch github_easybuilders - git branch --set-upstream-to "github_easybuilders/${BRANCH}" "${BRANCH}" - fi + echo "=== Adding and fetching EasyBuilders GitHub repository @ easybuilders/${REPO} ..." 
+ cd "${REPO}" + git remote add "github_easybuilders" "git@github.com:easybuilders/${REPO}.git" + git fetch github_easybuilders + git branch --set-upstream-to "github_easybuilders/${BRANCH}" "${BRANCH}" } # Print the content of the module @@ -72,8 +63,6 @@ conflict EasyBuild prepend-path PATH "\$root/easybuild-framework" -prepend-path PYTHONPATH "\$root/vsc-base/lib" -prepend-path PYTHONPATH "\$root/vsc-install/lib" prepend-path PYTHONPATH "\$root/easybuild-framework" prepend-path PYTHONPATH "\$root/easybuild-easyblocks" prepend-path PYTHONPATH "\$root/easybuild-easyconfigs" @@ -112,10 +101,6 @@ mkdir -p "${INSTALL_DIR}" cd "${INSTALL_DIR}" INSTALL_DIR="${PWD}" # get the full path -# Clone repository for vsc-base dependency with 'master' branch -github_clone_branch "vsc-base" "master" -github_clone_branch "vsc-install" "master" - # Clone code repositories with the 'develop' branch github_clone_branch "easybuild-framework" "develop" github_clone_branch "easybuild-easyblocks" "develop" diff --git a/easybuild/scripts/install-EasyBuild-sprint.sh b/easybuild/scripts/install-EasyBuild-sprint.sh new file mode 100755 index 0000000000..57a0a802db --- /dev/null +++ b/easybuild/scripts/install-EasyBuild-sprint.sh @@ -0,0 +1,143 @@ +#!/usr/bin/env bash + +# Stop in case of error +set -e + +########################### +# Helper functions +########################### + +# Print script help +print_usage() +{ + echo "Usage: $0 " + echo + echo " github_username: username on GitHub for which the EasyBuild repositories should be cloned" + echo + echo " install_dir: directory where all the EasyBuild files will be installed" + echo + echo " easyconfigs_branch: easybuild-easyconfigs branch to check out" + echo +} + +# Clone one branch +github_clone_branch() +{ + REPO="$1" + BRANCH="$2" + + cd "${INSTALL_DIR}" + + # Check if BRANCH already exists in the ${GITHUB_USERNAME}/${REPO} + if [[ ! 
-z $(git ls-remote --heads "git@github.com:${GITHUB_USERNAME}/${REPO}.git" "${BRANCH}") ]]; then + echo "=== Cloning ${GITHUB_USERNAME}/${REPO} branch ${BRANCH} ..." + git clone --branch "${BRANCH}" "git@github.com:${GITHUB_USERNAME}/${REPO}.git" + + echo "=== Adding and fetching EasyBuilders GitHub repository @ easybuilders/${REPO} ..." + cd "${REPO}" + git remote add "github_easybuilders" "git@github.com:easybuilders/${REPO}.git" + git fetch github_easybuilders + git branch --set-upstream-to "github_easybuilders/${BRANCH}" "${BRANCH}" + else + echo "=== Cloning ${GITHUB_USERNAME}/${REPO} ..." + git clone "git@github.com:${GITHUB_USERNAME}/${REPO}.git" + + echo "=== Adding and fetching EasyBuilders GitHub repository @ easybuilders/${REPO} ..." + cd "${REPO}" + git remote add "github_easybuilders" "git@github.com:easybuilders/${REPO}.git" + git fetch github_easybuilders + git checkout -b "${BRANCH}" "github_easybuilders/${BRANCH}" + fi +} + +# Print the content of the module +print_devel_module() +{ +cat < "${EB_DEVEL_MODULE}" +echo +echo "=== Run 'module use ${MODULES_INSTALL_DIR}' and 'module load ${EB_DEVEL_MODULE_NAME}' to use your development version of EasyBuild." 
+echo "=== (you can append ${MODULES_INSTALL_DIR} to your MODULEPATH to make this module always available for loading)" +echo +echo "=== To update each repository, run 'git pull origin' in each subdirectory of ${INSTALL_DIR}" +echo + +exit 0 + + diff --git a/easybuild/toolchains/mpi/intelmpi.py b/easybuild/toolchains/mpi/intelmpi.py index e404bf72f8..b4811754dc 100644 --- a/easybuild/toolchains/mpi/intelmpi.py +++ b/easybuild/toolchains/mpi/intelmpi.py @@ -29,7 +29,11 @@ :author: Kenneth Hoste (Ghent University) """ +import os + import easybuild.tools.toolchain as toolchain + +from distutils.version import LooseVersion from easybuild.toolchains.mpi.mpich2 import Mpich2 from easybuild.tools.toolchain.constants import COMPILER_FLAGS, COMPILER_VARIABLES from easybuild.tools.toolchain.variables import CommandFlagList @@ -67,6 +71,23 @@ def _set_mpi_compiler_variables(self): super(IntelMPI, self)._set_mpi_compiler_variables() + def _set_mpi_variables(self): + """Set the other MPI variables""" + + super(IntelMPI, self)._set_mpi_variables() + + if (LooseVersion(self.version) >= LooseVersion('2019')): + lib_dir = [os.path.join('intel64', 'lib', 'release')] + incl_dir = [os.path.join('intel64', 'include')] + + for root in self.get_software_root(self.MPI_MODULE_NAME): + self.variables.append_exists('MPI_LIB_STATIC', root, lib_dir, + filename="lib%s.a" % self.MPI_LIBRARY_NAME) + self.variables.append_exists('MPI_LIB_SHARED', root, lib_dir, + filename="lib%s.so" % self.MPI_LIBRARY_NAME) + self.variables.append_exists('MPI_LIB_DIR', root, lib_dir) + self.variables.append_exists('MPI_INC_DIR', root, incl_dir) + MPI_LINK_INFO_OPTION = '-show' def set_variables(self): diff --git a/easybuild/tools/build_log.py b/easybuild/tools/build_log.py index 616c839531..ba45075069 100644 --- a/easybuild/tools/build_log.py +++ b/easybuild/tools/build_log.py @@ -358,10 +358,13 @@ def print_warning(msg, *args, **kwargs): if args: msg = msg % args + log = kwargs.pop('log', None) silent = 
kwargs.pop('silent', False) if kwargs: raise EasyBuildError("Unknown named arguments passed to print_warning: %s", kwargs) + if log: + log.warning(msg) if not silent: sys.stderr.write("\nWARNING: %s\n\n" % msg) diff --git a/easybuild/tools/config.py b/easybuild/tools/config.py index ab98bcad6d..0bcf31ab8b 100644 --- a/easybuild/tools/config.py +++ b/easybuild/tools/config.py @@ -78,6 +78,8 @@ CONT_TYPES = [CONT_TYPE_DOCKER, CONT_TYPE_SINGULARITY] DEFAULT_CONT_TYPE = CONT_TYPE_SINGULARITY +DEFAULT_BRANCH = 'develop' +DEFAULT_INDEX_MAX_AGE = 7 * 24 * 60 * 60 # 1 week (in seconds) DEFAULT_JOB_BACKEND = 'GC3Pie' DEFAULT_LOGFILE_FORMAT = ("easybuild", "easybuild-%(name)s-%(version)s-%(date)s.%(time)s.log") DEFAULT_MAX_FAIL_RATIO_PERMS = 0.5 @@ -111,6 +113,9 @@ FORCE_DOWNLOAD_CHOICES = [FORCE_DOWNLOAD_ALL, FORCE_DOWNLOAD_PATCHES, FORCE_DOWNLOAD_SOURCES] DEFAULT_FORCE_DOWNLOAD = FORCE_DOWNLOAD_SOURCES +# package name for generic easyblocks +GENERIC_EASYBLOCK_PKG = 'generic' + # general module class GENERAL_CLASS = 'all' @@ -184,6 +189,7 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX): 'job_output_dir', 'job_polling_interval', 'job_target_resource', + 'locks_dir', 'modules_footer', 'modules_header', 'mpi_cmd_template', @@ -195,7 +201,6 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX): 'pr_commit_msg', 'pr_descr', 'pr_target_account', - 'pr_target_branch', 'pr_target_repo', 'pr_title', 'rpath_filter', @@ -225,6 +230,8 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX): 'group_writable_installdir', 'hidden', 'ignore_checksums', + 'ignore_index', + 'ignore_locks', 'install_latest_eb_release', 'lib64_fallback_sanity_check', 'logtostdout', @@ -249,6 +256,7 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX): 'use_f90cache', 'use_existing_modules', 'set_default_module', + 'wait_on_lock', ], True: [ 'cleanup_builddir', @@ -270,6 +278,12 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX): DEFAULT_CONT_TYPE: [ 'container_type', ], + DEFAULT_BRANCH: 
[ + 'pr_target_branch', + ], + DEFAULT_INDEX_MAX_AGE: [ + 'index_max_age', + ], DEFAULT_MAX_FAIL_RATIO_PERMS: [ 'max_fail_ratio_adjust_permissions', ], diff --git a/easybuild/tools/containers/docker.py b/easybuild/tools/containers/docker.py index bb5c2eb09e..0da773a269 100644 --- a/easybuild/tools/containers/docker.py +++ b/easybuild/tools/containers/docker.py @@ -34,7 +34,7 @@ from easybuild.tools.config import DOCKER_BASE_IMAGE_CENTOS, DOCKER_BASE_IMAGE_UBUNTU from easybuild.tools.containers.base import ContainerGenerator from easybuild.tools.containers.utils import det_os_deps -from easybuild.tools.filetools import rmtree2 +from easybuild.tools.filetools import remove_dir from easybuild.tools.run import run_cmd @@ -157,4 +157,4 @@ def build_image(self, dockerfile): run_cmd(docker_cmd, path=tempdir, stream_output=True) print_msg("Docker image created at %s" % container_name, log=self.log) - rmtree2(tempdir) + remove_dir(tempdir) diff --git a/easybuild/tools/filetools.py b/easybuild/tools/filetools.py index e414ed68a7..20b8cd6335 100644 --- a/easybuild/tools/filetools.py +++ b/easybuild/tools/filetools.py @@ -40,9 +40,12 @@ """ import datetime import difflib +import distutils.dir_util import fileinput import glob import hashlib +import imp +import inspect import os import re import shutil @@ -56,10 +59,10 @@ from easybuild.base import fancylogger from easybuild.tools import run # import build_log must stay, to use of EasyBuildLog -from easybuild.tools.build_log import EasyBuildError, dry_run_msg, print_msg -from easybuild.tools.config import build_option +from easybuild.tools.build_log import EasyBuildError, dry_run_msg, print_msg, print_warning +from easybuild.tools.config import GENERIC_EASYBLOCK_PKG, build_option from easybuild.tools.py2vs3 import std_urllib, string_type -from easybuild.tools.utilities import nub +from easybuild.tools.utilities import nub, remove_unwanted_chars try: import requests @@ -109,6 +112,7 @@ r'~': "_tilde_", } +PATH_INDEX_FILENAME = 
'.eb-path-index' CHECKSUM_TYPE_MD5 = 'md5' CHECKSUM_TYPE_SHA256 = 'sha256' @@ -241,6 +245,13 @@ def write_file(path, data, append=False, forced=False, backup=False, always_over raise EasyBuildError("Failed to write to %s: %s", path, err) +def is_binary(contents): + """ + Check whether given bytestring represents the contents of a binary file or not. + """ + return isinstance(contents, bytes) and b'\00' in bytes(contents) + + def resolve_path(path): """ Return fully resolved path for given path. @@ -296,11 +307,27 @@ def remove_dir(path): dry_run_msg("directory %s removed" % path, silent=build_option('silent')) return - try: - if os.path.exists(path): - rmtree2(path) - except OSError as err: - raise EasyBuildError("Failed to remove directory %s: %s", path, err) + if os.path.exists(path): + ok = False + errors = [] + # Try multiple times to cater for temporary failures on e.g. NFS mounted paths + max_attempts = 3 + for i in range(0, max_attempts): + try: + shutil.rmtree(path) + ok = True + break + except OSError as err: + _log.debug("Failed to remove path %s with shutil.rmtree at attempt %d: %s" % (path, i, err)) + errors.append(err) + time.sleep(2) + # make sure write permissions are enabled on entire directory + adjust_permissions(path, stat.S_IWUSR, add=True, recursive=True) + if ok: + _log.info("Path %s successfully removed." % path) + else: + raise EasyBuildError("Failed to remove directory %s even after %d attempts.\nReasons: %s", + path, max_attempts, errors) def remove(paths): @@ -589,6 +616,120 @@ def download_file(filename, url, path, forced=False): return None +def create_index(path, ignore_dirs=None): + """ + Create index for files in specified path. 
+ """ + if ignore_dirs is None: + ignore_dirs = [] + + index = set() + + if not os.path.exists(path): + raise EasyBuildError("Specified path does not exist: %s", path) + elif not os.path.isdir(path): + raise EasyBuildError("Specified path is not a directory: %s", path) + + for (dirpath, dirnames, filenames) in os.walk(path, topdown=True, followlinks=True): + for filename in filenames: + # use relative paths in index + rel_dirpath = os.path.relpath(dirpath, path) + # avoid that relative paths start with './' + if rel_dirpath == '.': + rel_dirpath = '' + index.add(os.path.join(rel_dirpath, filename)) + + # do not consider (certain) hidden directories + # note: we still need to consider e.g., .local ! + # replace list elements using [:], so os.walk doesn't process deleted directories + # see https://stackoverflow.com/questions/13454164/os-walk-without-hidden-folders + dirnames[:] = [d for d in dirnames if d not in ignore_dirs] + + return index + + +def dump_index(path, max_age_sec=None): + """ + Create index for files in specified path, and dump it to file (alphabetically sorted). + """ + if max_age_sec is None: + max_age_sec = build_option('index_max_age') + + index_fp = os.path.join(path, PATH_INDEX_FILENAME) + index_contents = create_index(path) + + curr_ts = datetime.datetime.now() + if max_age_sec == 0: + end_ts = datetime.datetime.max + else: + end_ts = curr_ts + datetime.timedelta(0, max_age_sec) + + lines = [ + "# created at: %s" % str(curr_ts), + "# valid until: %s" % str(end_ts), + ] + lines.extend(sorted(index_contents)) + + write_file(index_fp, '\n'.join(lines), always_overwrite=False) + + return index_fp + + +def load_index(path, ignore_dirs=None): + """ + Load index for specified path, and return contents (or None if no index exists). 
+ """ + if ignore_dirs is None: + ignore_dirs = [] + + index_fp = os.path.join(path, PATH_INDEX_FILENAME) + index = set() + + if build_option('ignore_index'): + _log.info("Ignoring index for %s...", path) + + elif os.path.exists(index_fp): + lines = read_file(index_fp).splitlines() + + valid_ts_regex = re.compile("^# valid until: (.*)", re.M) + valid_ts = None + + for line in lines: + + # extract "valid until" timestamp, so we can check whether index is still valid + if valid_ts is None: + res = valid_ts_regex.match(line) + else: + res = None + + if res: + valid_ts = res.group(1) + try: + valid_ts = datetime.datetime.strptime(valid_ts, '%Y-%m-%d %H:%M:%S.%f') + except ValueError as err: + raise EasyBuildError("Failed to parse timestamp '%s' for index at %s: %s", valid_ts, path, err) + + elif line.startswith('#'): + _log.info("Ignoring unknown header line '%s' in index for %s", line, path) + + else: + # filter out files that are in an ignored directory + path_dirs = line.split(os.path.sep)[:-1] + if not any(d in path_dirs for d in ignore_dirs): + index.add(line) + + # check whether index is still valid + if valid_ts: + curr_ts = datetime.datetime.now() + if curr_ts > valid_ts: + print_warning("Index for %s is no longer valid (too old), so ignoring it...", path) + index = None + else: + print_msg("found valid index for %s, so using it...", path) + + return index or None + + def find_easyconfigs(path, ignore_dirs=None): """ Find .eb easyconfig files in path @@ -617,7 +758,8 @@ def find_easyconfigs(path, ignore_dirs=None): return files -def search_file(paths, query, short=False, ignore_dirs=None, silent=False, filename_only=False, terse=False): +def search_file(paths, query, short=False, ignore_dirs=None, silent=False, filename_only=False, terse=False, + case_sensitive=False): """ Search for files using in specified paths using specified search query (regular expression) @@ -641,7 +783,11 @@ def search_file(paths, query, short=False, ignore_dirs=None, silent=False, 
filen # compile regex, case-insensitive try: - query = re.compile(query, re.I) + if case_sensitive: + query = re.compile(query) + else: + # compile regex, case-insensitive + query = re.compile(query, re.I) except re.error as err: raise EasyBuildError("Invalid search query: %s", err) @@ -654,22 +800,26 @@ def search_file(paths, query, short=False, ignore_dirs=None, silent=False, filen if not terse: print_msg("Searching (case-insensitive) for '%s' in %s " % (query.pattern, path), log=_log, silent=silent) - for (dirpath, dirnames, filenames) in os.walk(path, topdown=True): - for filename in filenames: - if query.search(filename): - if not path_hits: - var = "CFGS%d" % var_index - var_index += 1 - if filename_only: - path_hits.append(filename) - else: - path_hits.append(os.path.join(dirpath, filename)) - - # do not consider (certain) hidden directories - # note: we still need to consider e.g., .local ! - # replace list elements using [:], so os.walk doesn't process deleted directories - # see http://stackoverflow.com/questions/13454164/os-walk-without-hidden-folders - dirnames[:] = [d for d in dirnames if d not in ignore_dirs] + path_index = load_index(path, ignore_dirs=ignore_dirs) + if path_index is None or build_option('ignore_index'): + if os.path.exists(path): + _log.info("No index found for %s, creating one...", path) + path_index = create_index(path, ignore_dirs=ignore_dirs) + else: + path_index = [] + else: + _log.info("Index found for %s, so using it...", path) + + for filepath in path_index: + filename = os.path.basename(filepath) + if query.search(filename): + if not path_hits: + var = "CFGS%d" % var_index + var_index += 1 + if filename_only: + path_hits.append(filename) + else: + path_hits.append(os.path.join(path, filepath)) path_hits = sorted(path_hits) @@ -944,7 +1094,9 @@ def det_patched_files(path=None, txt=None, omit_ab_prefix=False, github=False, f patched_regex = re.compile(patched_regex, re.M) if path is not None: - txt = read_file(path) + # take 
into account that file may contain non-UTF-8 characters; + # so, read a byte string, and decode to UTF-8 string (ignoring any non-UTF-8 characters); + txt = read_file(path, mode='rb').decode('utf-8', 'replace') elif txt is None: raise EasyBuildError("Either a file path or a string representing a patch should be supplied") @@ -1374,22 +1526,8 @@ def path_matches(path, paths): def rmtree2(path, n=3): """Wrapper around shutil.rmtree to make it more robust when used on NFS mounted file systems.""" - ok = False - for i in range(0, n): - try: - shutil.rmtree(path) - ok = True - break - except OSError as err: - _log.debug("Failed to remove path %s with shutil.rmtree at attempt %d: %s" % (path, n, err)) - time.sleep(2) - - # make sure write permissions are enabled on entire directory - adjust_permissions(path, stat.S_IWUSR, add=True, recursive=True) - if not ok: - raise EasyBuildError("Failed to remove path %s with shutil.rmtree, even after %d attempts.", path, n) - else: - _log.info("Path %s successfully removed." 
% path) + _log.deprecated("Use 'remove_dir' rather than 'rmtree2'", '5.0') + remove_dir(path) def find_backup_name_candidate(src_file): @@ -1797,24 +1935,51 @@ def copy_files(paths, target_dir, force_in_dry_run=False): copy_file(path, target_dir) -def copy_dir(path, target_path, force_in_dry_run=False, **kwargs): +def copy_dir(path, target_path, force_in_dry_run=False, dirs_exist_ok=False, **kwargs): """ Copy a directory from specified location to specified location :param path: the original directory path :param target_path: path to copy the directory to :param force_in_dry_run: force running the command during dry run + :param dirs_exist_ok: wrapper around shutil.copytree option, which was added in Python 3.8 + + On Python >= 3.8 shutil.copytree is always used + On Python < 3.8 if 'dirs_exist_ok' is False - shutil.copytree is used + On Python < 3.8 if 'dirs_exist_ok' is True - distutils.dir_util.copy_tree is used - Additional specified named arguments are passed down to shutil.copytree + Additional specified named arguments are passed down to shutil.copytree if used. + + Because distutils.dir_util.copy_tree supports only 'symlinks' named argument, + using any other will raise EasyBuildError. 
""" if not force_in_dry_run and build_option('extended_dry_run'): dry_run_msg("copied directory %s to %s" % (path, target_path)) else: try: - if os.path.exists(target_path): + if not dirs_exist_ok and os.path.exists(target_path): raise EasyBuildError("Target location %s to copy %s to already exists", target_path, path) - shutil.copytree(path, target_path, **kwargs) + if sys.version_info >= (3, 8): + # on Python >= 3.8, shutil.copytree works fine, thanks to availability of dirs_exist_ok named argument + shutil.copytree(path, target_path, dirs_exist_ok=dirs_exist_ok, **kwargs) + + elif dirs_exist_ok: + # use distutils.dir_util.copy_tree with Python < 3.8 if dirs_exist_ok is enabled + + # first get value for symlinks named argument (if any) + preserve_symlinks = kwargs.pop('symlinks', False) + + # check if there are other named arguments (there shouldn't be, only 'symlinks' is supported) + if kwargs: + raise EasyBuildError("Unknown named arguments passed to copy_dir with dirs_exist_ok=True: %s", + ', '.join(sorted(kwargs.keys()))) + distutils.dir_util.copy_tree(path, target_path, preserve_symlinks=preserve_symlinks) + + else: + # if dirs_exist_ok is not enabled, just use shutil.copytree + shutil.copytree(path, target_path, **kwargs) + _log.info("%s copied to %s", path, target_path) except (IOError, OSError) as err: raise EasyBuildError("Failed to copy directory %s to %s: %s", path, target_path, err) @@ -1864,6 +2029,7 @@ def get_source_tarball_from_git(filename, targetdir, git_config): repo_name = git_config.pop('repo_name', None) commit = git_config.pop('commit', None) recursive = git_config.pop('recursive', False) + keep_git_dir = git_config.pop('keep_git_dir', False) # input validation of git_config dict if git_config: @@ -1912,7 +2078,10 @@ def get_source_tarball_from_git(filename, targetdir, git_config): run.run_cmd(' '.join(checkout_cmd), log_all=True, log_ok=False, simple=False, regexp=False, path=repo_name) # create an archive and delete the git repo directory 
- tar_cmd = ['tar', 'cfvz', targetpath, '--exclude', '.git', repo_name] + if keep_git_dir: + tar_cmd = ['tar', 'cfvz', targetpath, repo_name] + else: + tar_cmd = ['tar', 'cfvz', targetpath, '--exclude', '.git', repo_name] run.run_cmd(' '.join(tar_cmd), log_all=True, log_ok=False, simple=False, regexp=False) # cleanup (repo_name dir does not exist in dry run mode) @@ -1996,3 +2165,94 @@ def install_fake_vsc(): sys.path.insert(0, fake_vsc_path) return fake_vsc_path + + +def get_easyblock_class_name(path): + """Make sure file is an easyblock and get easyblock class name""" + fn = os.path.basename(path).split('.')[0] + mod = imp.load_source(fn, path) + clsmembers = inspect.getmembers(mod, inspect.isclass) + for cn, co in clsmembers: + if co.__module__ == mod.__name__: + ancestors = inspect.getmro(co) + if any(a.__name__ == 'EasyBlock' for a in ancestors): + return cn + return None + + +def is_generic_easyblock(easyblock): + """Return whether specified easyblock name is a generic easyblock or not.""" + + return easyblock and not easyblock.startswith(EASYBLOCK_CLASS_PREFIX) + + +def copy_easyblocks(paths, target_dir): + """ Find right location for easyblock file and copy it there""" + file_info = { + 'eb_names': [], + 'paths_in_repo': [], + 'new': [], + } + + subdir = os.path.join('easybuild', 'easyblocks') + if os.path.exists(os.path.join(target_dir, subdir)): + for path in paths: + cn = get_easyblock_class_name(path) + if not cn: + raise EasyBuildError("Could not determine easyblock class from file %s" % path) + + eb_name = remove_unwanted_chars(decode_class_name(cn).replace('-', '_')).lower() + + if is_generic_easyblock(cn): + pkgdir = GENERIC_EASYBLOCK_PKG + else: + pkgdir = eb_name[0] + + target_path = os.path.join(subdir, pkgdir, eb_name + '.py') + + full_target_path = os.path.join(target_dir, target_path) + file_info['eb_names'].append(eb_name) + file_info['paths_in_repo'].append(full_target_path) + file_info['new'].append(not os.path.exists(full_target_path)) + 
copy_file(path, full_target_path, force_in_dry_run=True) + + else: + raise EasyBuildError("Could not find %s subdir in %s", subdir, target_dir) + + return file_info + + +def copy_framework_files(paths, target_dir): + """ Find right location for framework file and copy it there""" + file_info = { + 'paths_in_repo': [], + 'new': [], + } + + paths = [os.path.abspath(path) for path in paths] + + framework_topdir = 'easybuild-framework' + + for path in paths: + target_path = None + dirnames = os.path.dirname(path).split(os.path.sep) + + if framework_topdir in dirnames: + # construct subdirectory by grabbing last entry in dirnames until we hit 'easybuild-framework' dir + subdirs = [] + while(dirnames[-1] != framework_topdir): + subdirs.insert(0, dirnames.pop()) + + parent_dir = os.path.join(*subdirs) if subdirs else '' + target_path = os.path.join(target_dir, parent_dir, os.path.basename(path)) + else: + raise EasyBuildError("Specified path '%s' does not include a '%s' directory!", path, framework_topdir) + + if target_path: + file_info['paths_in_repo'].append(target_path) + file_info['new'].append(not os.path.exists(target_path)) + copy_file(path, target_path) + else: + raise EasyBuildError("Couldn't find parent folder of updated file: %s", path) + + return file_info diff --git a/easybuild/tools/github.py b/easybuild/tools/github.py index 77dd8da0f5..9eb9219dd8 100644 --- a/easybuild/tools/github.py +++ b/easybuild/tools/github.py @@ -50,8 +50,9 @@ from easybuild.framework.easyconfig.parser import EasyConfigParser from easybuild.tools.build_log import EasyBuildError, print_msg, print_warning from easybuild.tools.config import build_option -from easybuild.tools.filetools import apply_patch, copy_dir, det_patched_files, download_file, extract_file -from easybuild.tools.filetools import mkdir, read_file, symlink, which, write_file +from easybuild.tools.filetools import apply_patch, copy_dir, copy_easyblocks, copy_framework_files +from easybuild.tools.filetools import 
det_patched_files, download_file, extract_file +from easybuild.tools.filetools import get_easyblock_class_name, mkdir, read_file, symlink, which, write_file from easybuild.tools.py2vs3 import HTTPError, URLError, ascii_letters, urlopen from easybuild.tools.systemtools import UNKNOWN, get_tool_version from easybuild.tools.utilities import nub, only_if_module_is_available @@ -85,7 +86,9 @@ GITHUB_API_URL = 'https://api.github.com' GITHUB_DIR_TYPE = u'dir' GITHUB_EB_MAIN = 'easybuilders' +GITHUB_EASYBLOCKS_REPO = 'easybuild-easyblocks' GITHUB_EASYCONFIGS_REPO = 'easybuild-easyconfigs' +GITHUB_FRAMEWORK_REPO = 'easybuild-framework' GITHUB_DEVELOP_BRANCH = 'develop' GITHUB_FILE_TYPE = u'file' GITHUB_PR_STATE_OPEN = 'open' @@ -251,7 +254,7 @@ def github_api_get_request(request_f, github_user=None, token=None, **kwargs): _log.warning("Error occurred while performing get request: %s", err) status, data = 0, None - _log.debug("get request result for %s: status: %d, data: %s", url, status, data) + _log.debug("get request result for %s: status: %d, data: %s", url.url, status, data) return (status, data) @@ -284,7 +287,7 @@ def github_api_put_request(request_f, github_user=None, token=None, **kwargs): else: raise EasyBuildError("FAILED: %s", data.get('message', "(unknown reason)")) - _log.debug("get request result for %s: status: %d, data: %s", url, status, data) + _log.debug("get request result for %s: status: %d, data: %s", url.url, status, data) return (status, data) @@ -369,13 +372,32 @@ def download_repo(repo=GITHUB_EASYCONFIGS_REPO, branch='master', account=GITHUB_ return extracted_path +def fetch_easyblocks_from_pr(pr, path=None, github_user=None): + """Fetch patched easyconfig files for a particular PR.""" + return fetch_files_from_pr(pr, path, github_user, github_repo=GITHUB_EASYBLOCKS_REPO) + + def fetch_easyconfigs_from_pr(pr, path=None, github_user=None): """Fetch patched easyconfig files for a particular PR.""" + return fetch_files_from_pr(pr, path, github_user, 
github_repo=GITHUB_EASYCONFIGS_REPO) + + +def fetch_files_from_pr(pr, path=None, github_user=None, github_repo=None): + """Fetch patched files for a particular PR.""" if github_user is None: github_user = build_option('github_user') + + if github_repo is None: + github_repo = GITHUB_EASYCONFIGS_REPO + if path is None: - path = build_option('pr_path') + if github_repo == GITHUB_EASYCONFIGS_REPO: + path = build_option('pr_path') + elif github_repo == GITHUB_EASYBLOCKS_REPO: + path = os.path.join(tempfile.gettempdir(), 'ebs_pr%s' % pr) + else: + raise EasyBuildError("Unknown repo: %s" % github_repo) if path is None: path = tempfile.mkdtemp() @@ -384,9 +406,17 @@ def fetch_easyconfigs_from_pr(pr, path=None, github_user=None): mkdir(path, parents=True) github_account = build_option('pr_target_account') - github_repo = GITHUB_EASYCONFIGS_REPO - _log.debug("Fetching easyconfigs from %s/%s PR #%s into %s", github_account, github_repo, pr, path) + if github_repo == GITHUB_EASYCONFIGS_REPO: + easyfiles = 'easyconfigs' + elif github_repo == GITHUB_EASYBLOCKS_REPO: + easyfiles = 'easyblocks' + else: + raise EasyBuildError("Don't know how to fetch files from repo %s", github_repo) + + subdir = os.path.join('easybuild', easyfiles) + + _log.debug("Fetching %s from %s/%s PR #%s into %s", easyfiles, github_account, github_repo, pr, path) pr_data, _ = fetch_pr_data(pr, github_account, github_repo, github_user) pr_merged = pr_data['merged'] @@ -429,12 +459,12 @@ def fetch_easyconfigs_from_pr(pr, path=None, github_user=None): if final_path is None: if pr_closed: - print_warning("Using easyconfigs from closed PR #%s" % pr) + print_warning("Using %s from closed PR #%s" % (easyfiles, pr)) # obtain most recent version of patched files - for patched_file in patched_files: + for patched_file in [f for f in patched_files if subdir in f]: # path to patch file, incl. 
subdir it is in - fn = os.path.sep.join(patched_file.split(os.path.sep)[-3:]) + fn = patched_file.split(subdir)[1].strip(os.path.sep) sha = pr_data['head']['sha'] full_url = URL_SEPARATOR.join([GITHUB_RAW, github_account, github_repo, sha, patched_file]) _log.info("Downloading %s from %s", fn, full_url) @@ -444,25 +474,28 @@ def fetch_easyconfigs_from_pr(pr, path=None, github_user=None): # symlink directories into expected place if they're not there yet if final_path != path: - dirpath = os.path.join(final_path, 'easybuild', 'easyconfigs') + dirpath = os.path.join(final_path, subdir) for eb_dir in os.listdir(dirpath): symlink(os.path.join(dirpath, eb_dir), os.path.join(path, os.path.basename(eb_dir))) # sanity check: make sure all patched files are downloaded - ec_files = [] - for patched_file in [f for f in patched_files if not f.startswith('test/')]: - fn = os.path.sep.join(patched_file.split(os.path.sep)[-3:]) + files = [] + for patched_file in [f for f in patched_files if subdir in f]: + fn = patched_file.split(easyfiles)[1].strip(os.path.sep) full_path = os.path.join(path, fn) if os.path.exists(full_path): - ec_files.append(full_path) + files.append(full_path) else: raise EasyBuildError("Couldn't find path to patched file %s", full_path) - return ec_files + return files def create_gist(txt, fn, descr=None, github_user=None, github_token=None): """Create a gist with the provided text.""" + + dry_run = build_option('dry_run') or build_option('extended_dry_run') + if descr is None: descr = "(none)" @@ -478,8 +511,12 @@ def create_gist(txt, fn, descr=None, github_user=None, github_token=None): } } } - g = RestClient(GITHUB_API_URL, username=github_user, token=github_token) - status, data = g.gists.post(body=body) + + if dry_run: + status, data = HTTP_STATUS_CREATED, {'html_url': 'https://gist.github.com/DRY_RUN'} + else: + g = RestClient(GITHUB_API_URL, username=github_user, token=github_token) + status, data = g.gists.post(body=body) if status != 
HTTP_STATUS_CREATED: raise EasyBuildError("Failed to create gist; status %s, data: %s", status, data) @@ -673,8 +710,8 @@ def _easyconfigs_pr_common(paths, ecs, start_branch=None, pr_branch=None, start_ # we need files to create the PR with non_existing_paths = [] ec_paths = [] - if paths['easyconfigs']: - for path in paths['easyconfigs']: + if paths['easyconfigs'] or paths['py_files']: + for path in paths['easyconfigs'] + paths['py_files']: if not os.path.exists(path): non_existing_paths.append(path) else: @@ -686,14 +723,16 @@ def _easyconfigs_pr_common(paths, ecs, start_branch=None, pr_branch=None, start_ if not any(paths.values()): raise EasyBuildError("No paths specified") - pr_target_repo = build_option('pr_target_repo') + pr_target_repo = det_pr_target_repo(paths) + if pr_target_repo is None: + raise EasyBuildError("Failed to determine target repository, please specify it via --pr-target-repo!") # initialize repository git_working_dir = tempfile.mkdtemp(prefix='git-working-dir') git_repo = init_repo(git_working_dir, pr_target_repo) repo_path = os.path.join(git_working_dir, pr_target_repo) - if pr_target_repo != GITHUB_EASYCONFIGS_REPO: + if pr_target_repo not in [GITHUB_EASYCONFIGS_REPO, GITHUB_EASYBLOCKS_REPO, GITHUB_FRAMEWORK_REPO]: raise EasyBuildError("Don't know how to create/update a pull request to the %s repository", pr_target_repo) if start_account is None: @@ -717,21 +756,23 @@ def _easyconfigs_pr_common(paths, ecs, start_branch=None, pr_branch=None, start_ # copy easyconfig files to right place target_dir = os.path.join(git_working_dir, pr_target_repo) - print_msg("copying easyconfigs to %s..." % target_dir) - file_info = copy_easyconfigs(ec_paths, target_dir) + print_msg("copying files to %s..." 
% target_dir) + file_info = COPY_FUNCTIONS[pr_target_repo](ec_paths, os.path.join(git_working_dir, pr_target_repo)) # figure out commit message to use if commit_msg: cnt = len(file_info['paths_in_repo']) - _log.debug("Using specified commit message for all %d new/modified easyconfigs at once: %s", cnt, commit_msg) - elif all(file_info['new']) and not paths['files_to_delete']: + _log.debug("Using specified commit message for all %d new/modified files at once: %s", cnt, commit_msg) + elif pr_target_repo == GITHUB_EASYCONFIGS_REPO and all(file_info['new']) and not paths['files_to_delete']: # automagically derive meaningful commit message if all easyconfig files are new commit_msg = "adding easyconfigs: %s" % ', '.join(os.path.basename(p) for p in file_info['paths_in_repo']) if paths['patch_files']: commit_msg += " and patches: %s" % ', '.join(os.path.basename(p) for p in paths['patch_files']) + elif pr_target_repo == GITHUB_EASYBLOCKS_REPO and all(file_info['new']): + commit_msg = "adding easyblocks: %s" % ', '.join(os.path.basename(p) for p in file_info['paths_in_repo']) else: raise EasyBuildError("A meaningful commit message must be specified via --pr-commit-msg when " - "modifying/deleting easyconfigs") + "modifying/deleting files or targeting the framework repo.") # figure out to which software name patches relate, and copy them to the right place if paths['patch_files']: @@ -776,7 +817,7 @@ def _easyconfigs_pr_common(paths, ecs, start_branch=None, pr_branch=None, start_ # checkout target branch if pr_branch is None: - if ec_paths: + if ec_paths and pr_target_repo == GITHUB_EASYCONFIGS_REPO: label = file_info['ecs'][0].name + re.sub('[.-]', '', file_info['ecs'][0].version) else: label = ''.join(random.choice(ascii_letters) for _ in range(10)) @@ -815,7 +856,7 @@ def _easyconfigs_pr_common(paths, ecs, start_branch=None, pr_branch=None, start_ push_branch_to_github(git_repo, target_account, pr_target_repo, pr_branch) - return file_info, deleted_paths, git_repo, 
pr_branch, diff_stat + return file_info, deleted_paths, git_repo, pr_branch, diff_stat, pr_target_repo def create_remote(git_repo, account, repo, https=False): @@ -991,9 +1032,10 @@ def not_eligible(msg): target = '%s/%s' % (pr_data['base']['repo']['owner']['login'], pr_data['base']['repo']['name']) print_msg("Checking eligibility of %s PR #%s for merging..." % (target, pr_data['number']), prefix=False) - # check target branch, must be 'develop' - msg_tmpl = "* targets develop branch: %s" - if pr_data['base']['ref'] == 'develop': + # check target branch, must be branch name specified in --pr-target-branch (usually 'develop') + pr_target_branch = build_option('pr_target_branch') + msg_tmpl = "* targets %s branch: %%s" % pr_target_branch + if pr_data['base']['ref'] == pr_target_branch: print_msg(msg_tmpl % 'OK', prefix=False) else: res = not_eligible(msg_tmpl % "FAILED; found '%s'" % pr_data['base']['ref']) @@ -1143,7 +1185,7 @@ def close_pr(pr, motivation_msg=None): raise EasyBuildError("GitHub user must be specified to use --close-pr") pr_target_account = build_option('pr_target_account') - pr_target_repo = build_option('pr_target_repo') + pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO pr_data, _ = fetch_pr_data(pr, pr_target_account, pr_target_repo, github_user, full=True) @@ -1216,7 +1258,7 @@ def list_prs(params, per_page=GITHUB_MAX_PER_PAGE, github_user=None): print_msg("Listing PRs with parameters: %s" % ', '.join(k + '=' + str(parameters[k]) for k in sorted(parameters))) pr_target_account = build_option('pr_target_account') - pr_target_repo = build_option('pr_target_repo') + pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO pr_data, _ = fetch_pr_data(None, pr_target_account, pr_target_repo, github_user, **parameters) @@ -1236,7 +1278,7 @@ def merge_pr(pr): raise EasyBuildError("GitHub user must be specified to use --merge-pr") pr_target_account = build_option('pr_target_account') - pr_target_repo = 
build_option('pr_target_repo') + pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO pr_data, pr_url = fetch_pr_data(pr, pr_target_account, pr_target_repo, github_user, full=True) @@ -1276,11 +1318,10 @@ def new_branch_github(paths, ecs, commit_msg=None): """ Create new branch on GitHub using specified files - :param paths: paths to categorized lists of files (easyconfigs, files to delete, patches) + :param paths: paths to categorized lists of files (easyconfigs, files to delete, patches, files with .py extension) :param ecs: list of parsed easyconfigs, incl. for dependencies (if robot is enabled) :param commit_msg: commit message to use """ - branch_name = build_option('pr_branch_name') if commit_msg is None: commit_msg = build_option('pr_commit_msg') @@ -1292,14 +1333,15 @@ def new_branch_github(paths, ecs, commit_msg=None): @only_if_module_is_available('git', pkgname='GitPython') -def new_pr_from_branch(branch_name, title=None, descr=None, pr_metadata=None): +def new_pr_from_branch(branch_name, title=None, descr=None, pr_target_repo=None, pr_metadata=None): """ Create new pull request from specified branch on GitHub. 
""" pr_target_account = build_option('pr_target_account') pr_target_branch = build_option('pr_target_branch') - pr_target_repo = build_option('pr_target_repo') + if pr_target_repo is None: + pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO # fetch GitHub token (required to perform actions on GitHub) github_user = build_option('github_user') @@ -1391,52 +1433,60 @@ def new_pr_from_branch(branch_name, title=None, descr=None, pr_metadata=None): file_info = det_file_info(ec_paths, target_dir) - # label easyconfigs for new software and/or new easyconfigs for existing software labels = [] - if any(file_info['new_folder']): - labels.append('new') - if any(file_info['new_file_in_existing_folder']): - labels.append('update') - - # only use most common toolchain(s) in toolchain label of PR title - toolchains = ['%(name)s/%(version)s' % ec['toolchain'] for ec in file_info['ecs']] - toolchains_counted = sorted([(toolchains.count(tc), tc) for tc in nub(toolchains)]) - toolchain_label = ','.join([tc for (cnt, tc) in toolchains_counted if cnt == toolchains_counted[-1][0]]) - - # only use most common module class(es) in moduleclass label of PR title - classes = [ec['moduleclass'] for ec in file_info['ecs']] - classes_counted = sorted([(classes.count(c), c) for c in nub(classes)]) - class_label = ','.join([tc for (cnt, tc) in classes_counted if cnt == classes_counted[-1][0]]) + if pr_target_repo == GITHUB_EASYCONFIGS_REPO: + # label easyconfigs for new software and/or new easyconfigs for existing software + if any(file_info['new_folder']): + labels.append('new') + if any(file_info['new_file_in_existing_folder']): + labels.append('update') + + # only use most common toolchain(s) in toolchain label of PR title + toolchains = ['%(name)s/%(version)s' % ec['toolchain'] for ec in file_info['ecs']] + toolchains_counted = sorted([(toolchains.count(tc), tc) for tc in nub(toolchains)]) + toolchain_label = ','.join([tc for (cnt, tc) in toolchains_counted if cnt == 
toolchains_counted[-1][0]]) + + # only use most common module class(es) in moduleclass label of PR title + classes = [ec['moduleclass'] for ec in file_info['ecs']] + classes_counted = sorted([(classes.count(c), c) for c in nub(classes)]) + class_label = ','.join([tc for (cnt, tc) in classes_counted if cnt == classes_counted[-1][0]]) + elif pr_target_repo == GITHUB_EASYBLOCKS_REPO: + if any(file_info['new']): + labels.append('new') if title is None: + if pr_target_repo == GITHUB_EASYCONFIGS_REPO: + if file_info['ecs'] and all(file_info['new']) and not deleted_paths: + # mention software name/version in PR title (only first 3) + names_and_versions = nub(["%s v%s" % (ec.name, ec.version) for ec in file_info['ecs']]) + if len(names_and_versions) <= 3: + main_title = ', '.join(names_and_versions) + else: + main_title = ', '.join(names_and_versions[:3] + ['...']) + + title = "{%s}[%s] %s" % (class_label, toolchain_label, main_title) + + # if Python is listed as a dependency, then mention Python version(s) in PR title + pyver = [] + for ec in file_info['ecs']: + # iterate over all dependencies (incl. 
build dependencies & multi-deps) + for dep in ec.dependencies(): + if dep['name'] == 'Python': + # check whether Python is listed as a multi-dep if it's marked as a build dependency + if dep['build_only'] and 'Python' not in ec['multi_deps']: + continue + else: + pyver.append(dep['version']) + if pyver: + title += " w/ Python %s" % ' + '.join(sorted(nub(pyver))) + elif pr_target_repo == GITHUB_EASYBLOCKS_REPO: + if file_info['eb_names'] and all(file_info['new']) and not deleted_paths: + plural = 's' if len(file_info['eb_names']) > 1 else '' + title = "new easyblock%s for %s" % (plural, (', '.join(file_info['eb_names']))) - if file_info['ecs'] and all(file_info['new']) and not deleted_paths: - # mention software name/version in PR title (only first 3) - names_and_versions = nub(["%s v%s" % (ec.name, ec.version) for ec in file_info['ecs']]) - if len(names_and_versions) <= 3: - main_title = ', '.join(names_and_versions) - else: - main_title = ', '.join(names_and_versions[:3] + ['...']) - - title = "{%s}[%s] %s" % (class_label, toolchain_label, main_title) - - # if Python is listed as a dependency, then mention Python version(s) in PR title - pyver = [] - for ec in file_info['ecs']: - # iterate over all dependencies (incl. build dependencies & multi-deps) - for dep in ec.dependencies(): - if dep['name'] == 'Python': - # check whether Python is listed as a multi-dep if it's marked as a build dependency - if dep['build_only'] and 'Python' not in ec['multi_deps']: - continue - else: - pyver.append(dep['version']) - if pyver: - title += " w/ Python %s" % ' + '.join(sorted(nub(pyver))) - - else: - raise EasyBuildError("Don't know how to make a PR title for this PR. " - "Please include a title (use --pr-title)") + if title is None: + raise EasyBuildError("Don't know how to make a PR title for this PR. 
" + "Please include a title (use --pr-title)") full_descr = "(created using `eb --new-pr`)\n" if descr is not None: @@ -1446,7 +1496,6 @@ def new_pr_from_branch(branch_name, title=None, descr=None, pr_metadata=None): pr_target_branch = build_option('pr_target_branch') dry_run = build_option('dry_run') or build_option('extended_dry_run') - pr_target_repo = build_option('pr_target_repo') msg = '\n'.join([ '', "Opening pull request%s" % ('', " [DRY RUN]")[dry_run], @@ -1510,12 +1559,13 @@ def new_pr(paths, ecs, title=None, descr=None, commit_msg=None): # create new branch in GitHub res = new_branch_github(paths, ecs, commit_msg=commit_msg) - file_info, deleted_paths, _, branch_name, diff_stat = res + file_info, deleted_paths, _, branch_name, diff_stat, pr_target_repo = res - new_pr_from_branch(branch_name, title=title, descr=descr, pr_metadata=(file_info, deleted_paths, diff_stat)) + new_pr_from_branch(branch_name, title=title, descr=descr, pr_target_repo=pr_target_repo, + pr_metadata=(file_info, deleted_paths, diff_stat)) -def det_account_branch_for_pr(pr_id, github_user=None): +def det_account_branch_for_pr(pr_id, github_user=None, pr_target_repo=None): """Determine account & branch corresponding to pull request with specified id.""" if github_user is None: @@ -1525,7 +1575,8 @@ def det_account_branch_for_pr(pr_id, github_user=None): raise EasyBuildError("GitHub username (--github-user) must be specified!") pr_target_account = build_option('pr_target_account') - pr_target_repo = build_option('pr_target_repo') + if pr_target_repo is None: + pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO pr_data, _ = fetch_pr_data(pr_id, pr_target_account, pr_target_repo, github_user) @@ -1538,6 +1589,47 @@ def det_account_branch_for_pr(pr_id, github_user=None): return account, branch +def det_pr_target_repo(paths): + """Determine target repository for pull request from given cagetorized list of files + + :param paths: paths to categorized lists of files 
(easyconfigs, files to delete, patches, .py files) + """ + pr_target_repo = build_option('pr_target_repo') + + # determine target repository for PR based on which files are provided + # (see categorize_files_by_type function) + if pr_target_repo is None: + + _log.info("Trying to derive target repository based on specified files...") + + easyconfigs, files_to_delete, patch_files, py_files = [paths[key] for key in sorted(paths.keys())] + + # Python files provided, and no easyconfig files or patches + if py_files and not (easyconfigs or patch_files): + + _log.info("Only Python files provided, no easyconfig files or patches...") + + # if all Python files are easyblocks, target repo should be easyblocks; + # otherwise, target repo is assumed to be framework + if all([get_easyblock_class_name(path) for path in py_files]): + pr_target_repo = GITHUB_EASYBLOCKS_REPO + _log.info("All Python files are easyblocks, target repository is assumed to be %s", pr_target_repo) + else: + pr_target_repo = GITHUB_FRAMEWORK_REPO + _log.info("Not all Python files are easyblocks, target repository is assumed to be %s", pr_target_repo) + + # if no Python files are provided, only easyconfigs & patches, or if files to delete are .eb files, + # then target repo is assumed to be easyconfigs + elif easyconfigs or patch_files or (files_to_delete and all(x.endswith('.eb') for x in files_to_delete)): + pr_target_repo = GITHUB_EASYCONFIGS_REPO + _log.info("Only easyconfig and patch files found, target repository is assumed to be %s", pr_target_repo) + + else: + _log.info("No Python files, easyconfigs or patches found, can't derive target repository...") + + return pr_target_repo + + @only_if_module_is_available('git', pkgname='GitPython') def update_branch(branch_name, paths, ecs, github_account=None, commit_msg=None): """ @@ -1557,12 +1649,13 @@ def update_branch(branch_name, paths, ecs, github_account=None, commit_msg=None) if github_account is None: github_account = build_option('github_user') or 
build_option('github_org') - _, _, _, _, diff_stat = _easyconfigs_pr_common(paths, ecs, start_branch=branch_name, pr_branch=branch_name, - start_account=github_account, commit_msg=commit_msg) + _, _, _, _, diff_stat, pr_target_repo = _easyconfigs_pr_common(paths, ecs, start_branch=branch_name, + pr_branch=branch_name, start_account=github_account, + commit_msg=commit_msg) print_msg("Overview of changes:\n%s\n" % diff_stat, log=_log, prefix=False) - full_repo = '%s/%s' % (github_account, build_option('pr_target_repo')) + full_repo = '%s/%s' % (github_account, pr_target_repo) msg = "pushed updated branch '%s' to %s" % (branch_name, full_repo) if build_option('dry_run') or build_option('extended_dry_run'): msg += " [DRY RUN]" @@ -1580,17 +1673,44 @@ def update_pr(pr_id, paths, ecs, commit_msg=None): :param commit_msg: commit message to use """ - github_account, branch_name = det_account_branch_for_pr(pr_id) + pr_target_repo = det_pr_target_repo(paths) + if pr_target_repo is None: + raise EasyBuildError("Failed to determine target repository, please specify it via --pr-target-repo!") + + github_account, branch_name = det_account_branch_for_pr(pr_id, pr_target_repo=pr_target_repo) update_branch(branch_name, paths, ecs, github_account=github_account, commit_msg=commit_msg) - full_repo = '%s/%s' % (build_option('pr_target_account'), build_option('pr_target_repo')) + full_repo = '%s/%s' % (build_option('pr_target_account'), pr_target_repo) msg = "updated https://github.com/%s/pull/%s" % (full_repo, pr_id) if build_option('dry_run') or build_option('extended_dry_run'): msg += " [DRY RUN]" print_msg(msg, log=_log) +def check_online_status(): + """ + Check whether we currently are online + Return True if online, else a list of error messages + """ + # Try repeatedly and with different URLs to cater for flaky servers + # E.g. 
Github returned "HTTP Error 403: Forbidden" and "HTTP Error 406: Not Acceptable" randomly + # Timeout and repeats set to total 1 minute + urls = [GITHUB_API_URL + '/rate_limit', GITHUB_URL, GITHUB_API_URL] + num_repeats = 6 + errors = set() # Use set to record only unique errors + for attempt in range(num_repeats): + # Cycle through URLs + url = urls[attempt % len(urls)] + try: + urlopen(url, timeout=10) + errors = None + break + except URLError as err: + errors.add('%s: %s' % (url, err)) + return sorted(errors) if errors else True + + def check_github(): """ Check status of GitHub integration, and report back. @@ -1601,6 +1721,8 @@ def check_github(): * check whether creating gists works * check whether location to local working directories for Git repositories is available (not strictly needed) """ + debug = build_option('debug') + # start by assuming that everything works, individual checks will disable action that won't work status = {} for action in ['--from-pr', '--new-pr', '--review-pr', '--upload-test-report', '--update-pr']: @@ -1609,12 +1731,12 @@ def check_github(): print_msg("\nChecking status of GitHub integration...\n", log=_log, prefix=False) # check whether we're online; if not, half of the checks are going to fail... 
- try: - print_msg("Making sure we're online...", log=_log, prefix=False, newline=False) - urlopen(GITHUB_URL, timeout=5) + print_msg("Making sure we're online...", log=_log, prefix=False, newline=False) + online_state = check_online_status() + if online_state is True: print_msg("OK\n", log=_log, prefix=False) - except URLError as err: - print_msg("FAIL") + else: + print_msg("FAIL (%s)", ', '.join(online_state), log=_log, prefix=False) raise EasyBuildError("checking status of GitHub integration must be done online") # GitHub user @@ -1693,9 +1815,9 @@ def check_github(): git_repo, res, push_err = None, None, None branch_name = 'test_branch_%s' % ''.join(random.choice(ascii_letters) for _ in range(5)) try: - git_repo = init_repo(git_working_dir, GITHUB_EASYCONFIGS_REPO, silent=True) + git_repo = init_repo(git_working_dir, GITHUB_EASYCONFIGS_REPO, silent=not debug) remote_name = setup_repo(git_repo, github_account, GITHUB_EASYCONFIGS_REPO, 'master', - silent=True, git_only=True) + silent=not debug, git_only=True) git_repo.create_head(branch_name) res = getattr(git_repo.remotes, remote_name).push(branch_name) except Exception as err: @@ -1726,12 +1848,11 @@ def check_github(): print_msg(check_res, log=_log, prefix=False) # cleanup: delete test branch that was pushed to GitHub - if git_repo: + if git_repo and push_err is None: try: - if git_repo and hasattr(git_repo, 'remotes') and hasattr(git_repo.remotes, 'origin'): - git_repo.remotes.origin.push(branch_name, delete=True) + getattr(git_repo.remotes, remote_name).push(branch_name, delete=True) except GitCommandError as err: - sys.stderr.write("WARNNIG: failed to delete test branch from GitHub: %s\n" % err) + sys.stderr.write("WARNING: failed to delete test branch from GitHub: %s\n" % err) # test creating a gist print_msg("* creating gists...", log=_log, prefix=False, newline=False) @@ -2015,7 +2136,7 @@ def sync_pr_with_develop(pr_id): raise EasyBuildError("GitHub user must be specified to use --sync-pr-with-develop") 
target_account = build_option('pr_target_account') - target_repo = build_option('pr_target_repo') + target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO pr_account, pr_branch = det_account_branch_for_pr(pr_id) @@ -2038,7 +2159,7 @@ def sync_branch_with_develop(branch_name): raise EasyBuildError("GitHub user must be specified to use --sync-branch-with-develop") target_account = build_option('pr_target_account') - target_repo = build_option('pr_target_repo') + target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO # initialize repository git_working_dir = tempfile.mkdtemp(prefix='git-working-dir') @@ -2053,3 +2174,11 @@ def sync_branch_with_develop(branch_name): # push updated branch back to GitHub (unless we're doing a dry run) return push_branch_to_github(git_repo, github_account, target_repo, branch_name) + + +# copy functions for --new-pr +COPY_FUNCTIONS = { + GITHUB_EASYCONFIGS_REPO: copy_easyconfigs, + GITHUB_EASYBLOCKS_REPO: copy_easyblocks, + GITHUB_FRAMEWORK_REPO: copy_framework_files, +} diff --git a/easybuild/tools/include.py b/easybuild/tools/include.py index 90b9715280..2e85d99e20 100644 --- a/easybuild/tools/include.py +++ b/easybuild/tools/include.py @@ -31,6 +31,7 @@ import os import re import sys +import tempfile from easybuild.base import fancylogger from easybuild.tools.build_log import EasyBuildError @@ -147,14 +148,17 @@ def is_software_specific_easyblock(module): def include_easyblocks(tmpdir, paths): """Include generic and software-specific easyblocks found in specified locations.""" - easyblocks_path = os.path.join(tmpdir, 'included-easyblocks') + easyblocks_path = tempfile.mkdtemp(dir=tmpdir, prefix='included-easyblocks-') set_up_eb_package(easyblocks_path, 'easybuild.easyblocks', subpkgs=['generic'], pkg_init_body=EASYBLOCKS_PKG_INIT_BODY) easyblocks_dir = os.path.join(easyblocks_path, 'easybuild', 'easyblocks') - allpaths = [p for p in expand_glob_paths(paths) if os.path.basename(p) != '__init__.py'] + 
allpaths = [p for p in expand_glob_paths(paths) + if os.path.basename(p).endswith('.py') and + os.path.basename(p) != '__init__.py'] + for easyblock_module in allpaths: filename = os.path.basename(easyblock_module) diff --git a/easybuild/tools/modules.py b/easybuild/tools/modules.py index 8bfc737bc3..8a5323434d 100644 --- a/easybuild/tools/modules.py +++ b/easybuild/tools/modules.py @@ -649,24 +649,28 @@ def show(self, mod_name): return ans - def get_value_from_modulefile(self, mod_name, regex): + def get_value_from_modulefile(self, mod_name, regex, strict=True): """ Get info from the module file for the specified module. :param mod_name: module name :param regex: (compiled) regular expression, with one group """ + value = None + if self.exist([mod_name], skip_avail=True)[0]: modinfo = self.show(mod_name) res = regex.search(modinfo) if res: - return res.group(1) - else: + value = res.group(1) + elif strict: raise EasyBuildError("Failed to determine value from 'show' (pattern: '%s') in %s", regex.pattern, modinfo) - else: + elif strict: raise EasyBuildError("Can't get value from a non-existing module %s", mod_name) + return value + def modulefile_path(self, mod_name, strip_ext=False): """ Get the path of the module file for the specified module @@ -1086,6 +1090,15 @@ def path_to_top_of_module_tree(self, top_paths, mod_name, full_mod_subdir, deps, self.log.debug("Path to top of module tree from %s: %s" % (mod_name, path)) return path + def get_setenv_value_from_modulefile(self, mod_name, var_name): + """ + Get value for specific 'setenv' statement from module file for the specified module. 
+
+        :param mod_name: module name
+        :param var_name: name of the variable being set for which value should be returned
+        """
+        raise NotImplementedError
+
     def update(self):
         """Update after new modules were added."""
         raise NotImplementedError
@@ -1126,6 +1139,26 @@ def update(self):
         """Update after new modules were added."""
         pass
 
+    def get_setenv_value_from_modulefile(self, mod_name, var_name):
+        """
+        Get value for specific 'setenv' statement from module file for the specified module.
+
+        :param mod_name: module name
+        :param var_name: name of the variable being set for which value should be returned
+        """
+        # Tcl-based module tools produce "module show" output with setenv statements like:
+        # "setenv GCC_PATH /opt/gcc/8.3.0"
+        # - line starts with 'setenv'
+        # - whitespace (spaces & tabs) around variable name
+        # - no quotes or parentheses around value (which can contain spaces!)
+        regex = re.compile(r'^setenv\s+%s\s+(?P<value>.+)' % var_name, re.M)
+        value = self.get_value_from_modulefile(mod_name, regex, strict=False)
+
+        if value:
+            value = value.strip()
+
+        return value
+
 
 class EnvironmentModulesTcl(EnvironmentModulesC):
     """Interface to (Tcl) environment modules (modulecmd.tcl)."""
@@ -1390,6 +1423,27 @@ def exist(self, mod_names, skip_avail=False, maybe_partial=True):
         return super(Lmod, self).exist(mod_names, mod_exists_regex_template=r'^\s*\S*/%s.*(\.lua)?:\s*$',
                                        skip_avail=skip_avail, maybe_partial=maybe_partial)
 
+    def get_setenv_value_from_modulefile(self, mod_name, var_name):
+        """
+        Get value for specific 'setenv' statement from module file for the specified module.
+
+        :param mod_name: module name
+        :param var_name: name of the variable being set for which value should be returned
+        """
+        # Lmod produces "module show" output with setenv statements like:
+        # setenv("EBROOTBZIP2","/tmp/software/bzip2/1.0.6")
+        # - line starts with setenv(
+        # - both variable name and value are enclosed in double quotes, separated by comma
+        # - value can contain spaces!
+        # - line ends with )
+        regex = re.compile(r'^setenv\("%s"\s*,\s*"(?P<value>.+)"\)' % var_name, re.M)
+        value = self.get_value_from_modulefile(mod_name, regex, strict=False)
+
+        if value:
+            value = value.strip()
+
+        return value
+
 
 def get_software_root_env_var_name(name):
     """Return name of environment variable for software root."""
diff --git a/easybuild/tools/options.py b/easybuild/tools/options.py
index d149ee3d79..2a09600f78 100644
--- a/easybuild/tools/options.py
+++ b/easybuild/tools/options.py
@@ -55,13 +55,14 @@
 from easybuild.framework.easyconfig.format.pyheaderconfigobj import build_easyconfig_constants_dict
 from easybuild.framework.easyconfig.format.yeb import YEB_FORMAT_EXTENSION
 from easybuild.framework.easyconfig.tools import alt_easyconfig_paths, get_paths_for
+from easybuild.toolchains.compiler.systemcompiler import TC_CONSTANT_SYSTEM
 from easybuild.tools import build_log, run  # build_log should always stay there, to ensure EasyBuildLog
 from easybuild.tools.build_log import DEVEL_LOG_LEVEL, EasyBuildError
 from easybuild.tools.build_log import init_logging, log_start, print_warning, raise_easybuilderror
-from easybuild.tools.config import CONT_IMAGE_FORMATS, CONT_TYPES, DEFAULT_CONT_TYPE
-from easybuild.tools.config import DEFAULT_ALLOW_LOADED_MODULES, DEFAULT_FORCE_DOWNLOAD, DEFAULT_JOB_BACKEND
-from easybuild.tools.config import DEFAULT_LOGFILE_FORMAT, DEFAULT_MAX_FAIL_RATIO_PERMS, DEFAULT_MNS
-from easybuild.tools.config import DEFAULT_MODULE_SYNTAX, DEFAULT_MODULES_TOOL, DEFAULT_MODULECLASSES
+from easybuild.tools.config import CONT_IMAGE_FORMATS, CONT_TYPES, DEFAULT_CONT_TYPE, DEFAULT_ALLOW_LOADED_MODULES
+from easybuild.tools.config import DEFAULT_BRANCH, DEFAULT_FORCE_DOWNLOAD, DEFAULT_INDEX_MAX_AGE
+from easybuild.tools.config import DEFAULT_JOB_BACKEND, DEFAULT_LOGFILE_FORMAT, DEFAULT_MAX_FAIL_RATIO_PERMS
+from easybuild.tools.config import DEFAULT_MNS, DEFAULT_MODULE_SYNTAX, DEFAULT_MODULES_TOOL, DEFAULT_MODULECLASSES
 from easybuild.tools.config
import DEFAULT_PATH_SUBDIRS, DEFAULT_PKG_RELEASE, DEFAULT_PKG_TOOL, DEFAULT_PKG_TYPE from easybuild.tools.config import DEFAULT_PNS, DEFAULT_PREFIX, DEFAULT_REPOSITORY, EBROOT_ENV_VAR_ACTIONS, ERROR from easybuild.tools.config import FORCE_DOWNLOAD_CHOICES, GENERAL_CLASS, IGNORE, JOB_DEPS_TYPE_ABORT_ON_ERROR @@ -79,7 +80,7 @@ from easybuild.tools.github import GITHUB_PR_DIRECTION_DESC, GITHUB_PR_ORDER_CREATED, GITHUB_PR_STATE_OPEN from easybuild.tools.github import GITHUB_PR_STATES, GITHUB_PR_ORDERS, GITHUB_PR_DIRECTIONS from easybuild.tools.github import HAVE_GITHUB_API, HAVE_KEYRING, VALID_CLOSE_PR_REASONS -from easybuild.tools.github import fetch_github_token +from easybuild.tools.github import fetch_easyblocks_from_pr, fetch_github_token from easybuild.tools.hooks import KNOWN_HOOKS from easybuild.tools.include import include_easyblocks, include_module_naming_schemes, include_toolchains from easybuild.tools.job.backend import avail_job_backends @@ -92,9 +93,10 @@ from easybuild.tools.run import run_cmd from easybuild.tools.package.utilities import avail_package_naming_schemes from easybuild.tools.toolchain.compiler import DEFAULT_OPT_LEVEL, OPTARCH_MAP_CHAR, OPTARCH_SEP, Compiler +from easybuild.tools.toolchain.toolchain import SYSTEM_TOOLCHAIN_NAME from easybuild.tools.repository.repository import avail_repositories -from easybuild.tools.systemtools import check_python_version, get_cpu_architecture, get_cpu_family, get_cpu_features -from easybuild.tools.systemtools import get_system_info +from easybuild.tools.systemtools import UNKNOWN, check_python_version, get_cpu_architecture, get_cpu_family +from easybuild.tools.systemtools import get_cpu_features, get_system_info from easybuild.tools.version import this_is_easybuild @@ -253,8 +255,13 @@ def basic_options(self): 'extended-dry-run-ignore-errors': ("Ignore errors that occur during dry run", None, 'store_true', True), 'force': ("Force to rebuild software even if it's already installed (i.e. 
if it can be found as module), " "and skipping check for OS dependencies", None, 'store_true', False, 'f'), + 'ignore-locks': ("Ignore locks that prevent two identical installations running in parallel", + None, 'store_true', False), 'job': ("Submit the build as a job", None, 'store_true', False), 'logtostdout': ("Redirect main log to stdout", None, 'store_true', False, 'l'), + 'locks-dir': ("Directory to store lock files (should be on a shared filesystem); " + "None implies .locks subdirectory of software installation directory", + None, 'store_or_None', None), 'missing-modules': ("Print list of missing modules for dependencies of specified easyconfigs", None, 'store_true', False, 'M'), 'only-blocks': ("Only build listed blocks", 'strlist', 'extend', None, 'b', {'metavar': 'BLOCKS'}), @@ -311,6 +318,9 @@ def software_options(self): opts['map-toolchains'] = ("Enable mapping of (sub)toolchains when --try-toolchain(-version) is used", None, 'store_true', True) + opts['try-update-deps'] = ("Try to update versions of the dependencies of an easyconfig based on what is " + "available in the robot path", + None, 'store_true', False) self.log.debug("software_options: descr %s opts %s" % (descr, opts)) self.add_group_parser(opts, descr) @@ -432,6 +442,8 @@ def override_options(self): None, 'store_true', False), 'verify-easyconfig-filenames': ("Verify whether filename of specified easyconfigs matches with contents", None, 'store_true', False), + 'wait-on-lock': ("Wait interval (in seconds) to use when waiting for existing lock to be removed " + "(0: implies no waiting, but exiting with an error)", int, 'store', 0), 'zip-logs': ("Zip logs that are copied to install directory, using specified command", None, 'store_or_None', 'gzip'), @@ -592,6 +604,8 @@ def github_options(self): 'git-working-dirs-path': ("Path to Git working directories for EasyBuild repositories", str, 'store', None), 'github-user': ("GitHub username", str, 'store', None), 'github-org': ("GitHub 
organization", str, 'store', None), + 'include-easyblocks-from-pr': ("Include easyblocks from specified PR", int, 'store', None, + {'metavar': 'PR#'}), 'install-github-token': ("Install GitHub token (requires --github-user)", None, 'store_true', False), 'close-pr': ("Close pull request", int, 'store', None, {'metavar': 'PR#'}), 'close-pr-msg': ("Custom close message for pull request closed with --close-pr; ", str, 'store', None), @@ -609,8 +623,9 @@ def github_options(self): 'pr-commit-msg': ("Commit message for new/updated pull request created with --new-pr", str, 'store', None), 'pr-descr': ("Description for new pull request created with --new-pr", str, 'store', None), 'pr-target-account': ("Target account for new PRs", str, 'store', GITHUB_EB_MAIN), - 'pr-target-branch': ("Target branch for new PRs", str, 'store', 'develop'), - 'pr-target-repo': ("Target repository for new/updating PRs", str, 'store', GITHUB_EASYCONFIGS_REPO), + 'pr-target-branch': ("Target branch for new PRs", str, 'store', DEFAULT_BRANCH), + 'pr-target-repo': ("Target repository for new/updating PRs (default: auto-detect based on provided files)", + str, 'store', None), 'pr-title': ("Title for new pull request created with --new-pr", str, 'store', None), 'preview-pr': ("Preview a new pull request", None, 'store_true', False), 'sync-branch-with-develop': ("Sync branch with current 'develop' branch", str, 'store', None), @@ -681,8 +696,12 @@ def easyconfig_options(self): descr = ("Options for Easyconfigs", "Options that affect all specified easyconfig files.") opts = OrderedDict({ + 'create-index': ("Create index for files in specified directory", None, 'store', None), 'fix-deprecated-easyconfigs': ("Fix use of deprecated functionality in specified easyconfig files.", None, 'store_true', False), + 'ignore-index': ("Ignore index when searching for files", None, 'store_true', False), + 'index-max-age': ("Maximum age for index before it is considered stale (in seconds)", + int, 'store', 
DEFAULT_INDEX_MAX_AGE), 'inject-checksums': ("Inject checksums of specified type for sources/patches into easyconfig file(s)", 'choice', 'store_or_None', CHECKSUM_TYPE_SHA256, CHECKSUM_TYPES), 'local-var-naming-check': ("Mode to use when checking whether local variables follow the recommended " @@ -738,8 +757,11 @@ def validate(self): for opt in ['software', 'try-software', 'toolchain', 'try-toolchain']: val = getattr(self.options, opt.replace('-', '_')) if val and len(val) != 2: - msg = "--%s requires NAME,VERSION (given %s)" % (opt, ','.join(val)) - error_msgs.append(msg) + if opt in ['toolchain', 'try-toolchain'] and val == [TC_CONSTANT_SYSTEM]: + setattr(self.options, opt.replace('-', '_'), [SYSTEM_TOOLCHAIN_NAME, SYSTEM_TOOLCHAIN_NAME]) + else: + msg = "--%s requires NAME,VERSION (given %s)" % (opt, ','.join(val)) + error_msgs.append(msg) if self.options.umask: umask_regex = re.compile('^[0-7]{3}$') @@ -922,7 +944,7 @@ def _postprocess_checks(self): """Check whether (combination of) configuration options make sense.""" # fail early if required dependencies for functionality requiring using GitHub API are not available: - if self.options.from_pr or self.options.upload_test_report: + if self.options.from_pr or self.options.include_easyblocks_from_pr or self.options.upload_test_report: if not HAVE_GITHUB_API: raise EasyBuildError("Required support for using GitHub API is not available (see warnings)") @@ -1040,8 +1062,8 @@ def _postprocess_list_avail(self): if self.options.avail_easyconfig_licenses: msg += avail_easyconfig_licenses(self.options.output_format) - # dump available easyblocks - if self.options.list_easyblocks: + # dump available easyblocks (unless including easyblocks from pr, in which case it will be done later) + if self.options.list_easyblocks and not self.options.include_easyblocks_from_pr: msg += list_easyblocks(self.options.list_easyblocks, self.options.output_format) # dump known toolchains @@ -1085,7 +1107,8 @@ def 
_postprocess_list_avail(self): print(msg) # cleanup tmpdir and exit - cleanup_and_exit(self.tmpdir) + if not self.options.include_easyblocks_from_pr: + cleanup_and_exit(self.tmpdir) def avail_repositories(self): """Show list of known repository types.""" @@ -1148,6 +1171,7 @@ def show_system_info(self): """Show system information.""" system_info = get_system_info() cpu_features = get_cpu_features() + cpu_arch_name = system_info['cpu_arch_name'] lines = [ "System information (%s):" % system_info['hostname'], '', @@ -1161,6 +1185,13 @@ def show_system_info(self): " -> vendor: %s" % system_info['cpu_vendor'], " -> architecture: %s" % get_cpu_architecture(), " -> family: %s" % get_cpu_family(), + ] + if cpu_arch_name == UNKNOWN: + lines.append(" -> arch name: UNKNOWN (archspec is not installed?)") + else: + lines.append(" -> arch name: %s" % cpu_arch_name) + + lines.extend([ " -> model: %s" % system_info['cpu_model'], " -> speed: %s" % system_info['cpu_speed'], " -> cores: %s" % system_info['core_count'], @@ -1170,7 +1201,8 @@ def show_system_info(self): " -> glibc version: %s" % system_info['glibc_version'], " -> Python binary: %s" % sys.executable, " -> Python version: %s" % sys.version.split(' ')[0], - ] + ]) + return '\n'.join(lines) def show_config(self): @@ -1393,6 +1425,29 @@ def set_up_configuration(args=None, logfile=None, testing=False, silent=False): init(options, config_options_dict) init_build_options(build_options=build_options, cmdline_options=options) + # done here instead of in _postprocess_include because github integration requires build_options to be initialized + if eb_go.options.include_easyblocks_from_pr: + easyblocks_from_pr = fetch_easyblocks_from_pr(eb_go.options.include_easyblocks_from_pr) + + if eb_go.options.include_easyblocks: + # make sure we're not including the same easyblock twice + included_from_pr = set([os.path.basename(eb) for eb in easyblocks_from_pr]) + included_from_file = set([os.path.basename(eb) for eb in 
eb_go.options.include_easyblocks]) + included_twice = included_from_pr & included_from_file + if included_twice: + raise EasyBuildError("Multiple inclusion of %s, check your --include-easyblocks options", + ','.join(included_twice)) + + include_easyblocks(eb_go.options.tmpdir, easyblocks_from_pr) + + if eb_go.options.list_easyblocks: + msg = list_easyblocks(eb_go.options.list_easyblocks, eb_go.options.output_format) + if eb_go.options.unittest_file: + log.info(msg) + else: + print(msg) + cleanup_and_exit(tmpdir) + check_python_version() # move directory containing fake vsc namespace into temporary directory used for this session @@ -1429,6 +1484,7 @@ def process_software_build_specs(options): 'version': options.try_software_version, 'toolchain_name': options.try_toolchain_name, 'toolchain_version': options.try_toolchain_version, + 'update_deps': options.try_update_deps } # process easy options diff --git a/easybuild/tools/parallelbuild.py b/easybuild/tools/parallelbuild.py index da824d66fd..8a7348fb18 100644 --- a/easybuild/tools/parallelbuild.py +++ b/easybuild/tools/parallelbuild.py @@ -68,7 +68,7 @@ def build_easyconfigs_in_parallel(build_command, easyconfigs, output_dir='easybu :param output_dir: output directory :param prepare_first: prepare by runnning fetch step first for each easyconfig """ - _log.info("going to build these easyconfigs in parallel: %s", easyconfigs) + _log.info("going to build these easyconfigs in parallel: %s", [os.path.basename(ec['spec']) for ec in easyconfigs]) active_job_backend = job_backend() if active_job_backend is None: @@ -94,7 +94,7 @@ def build_easyconfigs_in_parallel(build_command, easyconfigs, output_dir='easybu prepare_easyconfig(easyconfig) # the new job will only depend on already submitted jobs - _log.info("creating job for ec: %s" % easyconfig['ec']) + _log.info("creating job for ec: %s" % os.path.basename(easyconfig['spec'])) new_job = create_job(active_job_backend, build_command, easyconfig, output_dir=output_dir) # 
filter out dependencies marked as external modules diff --git a/easybuild/tools/repository/gitrepo.py b/easybuild/tools/repository/gitrepo.py index f34a95088e..d9f84d6700 100644 --- a/easybuild/tools/repository/gitrepo.py +++ b/easybuild/tools/repository/gitrepo.py @@ -44,7 +44,7 @@ from easybuild.base import fancylogger from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.filetools import rmtree2 +from easybuild.tools.filetools import remove_dir from easybuild.tools.repository.filerepo import FileRepository from easybuild.tools.utilities import only_if_module_is_available from easybuild.tools.version import VERSION @@ -188,6 +188,6 @@ def cleanup(self): """ try: self.wc = os.path.dirname(self.wc) - rmtree2(self.wc) + remove_dir(self.wc) except IOError as err: raise EasyBuildError("Can't remove working copy %s: %s", self.wc, err) diff --git a/easybuild/tools/repository/hgrepo.py b/easybuild/tools/repository/hgrepo.py index 000dd9b5b8..cb121f5cb2 100644 --- a/easybuild/tools/repository/hgrepo.py +++ b/easybuild/tools/repository/hgrepo.py @@ -44,7 +44,7 @@ from easybuild.base import fancylogger from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.filetools import rmtree2 +from easybuild.tools.filetools import remove_dir from easybuild.tools.repository.filerepo import FileRepository _log = fancylogger.getLogger('hgrepo', fname=False) @@ -192,6 +192,6 @@ def cleanup(self): Clean up mercurial working copy. 
""" try: - rmtree2(self.wc) + remove_dir(self.wc) except IOError as err: raise EasyBuildError("Can't remove working copy %s: %s", self.wc, err) diff --git a/easybuild/tools/repository/svnrepo.py b/easybuild/tools/repository/svnrepo.py index 6dc0f3c7b0..24dfcb8811 100644 --- a/easybuild/tools/repository/svnrepo.py +++ b/easybuild/tools/repository/svnrepo.py @@ -44,7 +44,7 @@ from easybuild.base import fancylogger from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.filetools import rmtree2 +from easybuild.tools.filetools import remove_dir from easybuild.tools.repository.filerepo import FileRepository from easybuild.tools.utilities import only_if_module_is_available @@ -190,6 +190,6 @@ def cleanup(self): Clean up SVN working copy. """ try: - rmtree2(self.wc) + remove_dir(self.wc) except OSError as err: raise EasyBuildError("Can't remove working copy %s: %s", self.wc, err) diff --git a/easybuild/tools/robot.py b/easybuild/tools/robot.py index 9bf941edab..cd3a2dca55 100644 --- a/easybuild/tools/robot.py +++ b/easybuild/tools/robot.py @@ -469,20 +469,36 @@ def resolve_dependencies(easyconfigs, modtool, retain_all_deps=False, raise_erro return ordered_ecs -def search_easyconfigs(query, short=False, filename_only=False, terse=False): - """Search for easyconfigs, if a query is provided.""" +def search_easyconfigs(query, short=False, filename_only=False, terse=False, consider_extra_paths=True, + print_result=True, case_sensitive=False): + """ + Search for easyconfigs, if a query is provided. 
+ + :param query: regex query string + :param short: figure out common prefix of hits, use variable to factor it out + :param filename_only: only print filenames, not paths + :param terse: stick to terse (machine-readable) output, as opposed to pretty-printing + :param consider_extra_paths: consider all paths when searching + :param print_result: print the list of easyconfigs + :param case_sensitive: boolean to decide whether search is case sensitive + + :return: return a list of paths for the query + """ search_path = build_option('robot_path') if not search_path: search_path = [os.getcwd()] extra_search_paths = build_option('search_paths') - if extra_search_paths: - search_path.extend(extra_search_paths) + # If we're returning a list of possible resolutions by the robot, don't include the extra_search_paths + if extra_search_paths and consider_extra_paths: + # we shouldn't use += or .extend here but compose a new list, + # to avoid adding a path to the list returned by build_option('robot_path') ! 
+ search_path = search_path + extra_search_paths ignore_dirs = build_option('ignore_dirs') # note: don't pass down 'filename_only' here, we need the full path to filter out archived easyconfigs var_defs, _hits = search_file(search_path, query, short=short, ignore_dirs=ignore_dirs, terse=terse, - silent=True, filename_only=False) + silent=True, filename_only=False, case_sensitive=case_sensitive) # filter out archived easyconfigs, these are handled separately hits, archived_hits = [], [] @@ -492,32 +508,42 @@ def search_easyconfigs(query, short=False, filename_only=False, terse=False): else: hits.append(hit) - # check whether only filenames should be printed + # check whether only filenames should be used if filename_only: hits = [os.path.basename(hit) for hit in hits] archived_hits = [os.path.basename(hit) for hit in archived_hits] - # prepare output format - if terse: - lines, tmpl = [], '%s' + if print_result: + # prepare output format + if terse: + lines, tmpl = [], '%s' + else: + lines = ['%s=%s' % var_def for var_def in var_defs] + tmpl = ' * %s' + + # non-archived hits are shown first + lines.extend(tmpl % hit for hit in hits) + + # also take into account archived hits + if archived_hits: + if build_option('consider_archived_easyconfigs'): + if not terse: + lines.extend(['', "Matching archived easyconfigs:", '']) + lines.extend(tmpl % hit for hit in archived_hits) + elif not terse: + cnt = len(archived_hits) + lines.extend([ + '', + "Note: %d matching archived easyconfig(s) found, use --consider-archived-easyconfigs to see them" + % cnt, + ]) + + print('\n'.join(lines)) + + # if requested return the matches as a list + if build_option('consider_archived_easyconfigs'): + final_hits = hits + archived_hits else: - lines = ['%s=%s' % var_def for var_def in var_defs] - tmpl = ' * %s' - - # non-archived hits are shown first - lines.extend(tmpl % hit for hit in hits) - - # also take into account archived hits - if archived_hits: - if 
build_option('consider_archived_easyconfigs'): - if not terse: - lines.extend(['', "Matching archived easyconfigs:", '']) - lines.extend(tmpl % hit for hit in archived_hits) - elif not terse: - cnt = len(archived_hits) - lines.extend([ - '', - "Note: %d matching archived easyconfig(s) found, use --consider-archived-easyconfigs to see them" % cnt, - ]) - - print('\n'.join(lines)) + final_hits = hits + + return final_hits diff --git a/easybuild/tools/run.py b/easybuild/tools/run.py index 7e22e8c0ad..a3471abc04 100644 --- a/easybuild/tools/run.py +++ b/easybuild/tools/run.py @@ -175,6 +175,9 @@ def run_cmd(cmd, log_ok=True, log_all=False, simple=False, inp=None, regexp=True if trace: trace_txt = "running command:\n" trace_txt += "\t[started at: %s]\n" % start_time.strftime('%Y-%m-%d %H:%M:%S') + trace_txt += "\t[working dir: %s]\n" % (path or os.getcwd()) + if inp: + trace_txt += "\t[input: %s]\n" % inp trace_txt += "\t[output logged in %s]\n" % cmd_log_fn trace_msg(trace_txt + '\t' + cmd_msg) @@ -300,6 +303,7 @@ def run_cmd_qa(cmd, qa, no_qa=None, log_ok=True, log_all=False, simple=False, re if trace: trace_txt = "running interactive command:\n" trace_txt += "\t[started at: %s]\n" % start_time.strftime('%Y-%m-%d %H:%M:%S') + trace_txt += "\t[working dir: %s]\n" % (path or os.getcwd()) trace_txt += "\t[output logged in %s]\n" % cmd_log_fn trace_msg(trace_txt + '\t' + cmd.strip()) diff --git a/easybuild/tools/systemtools.py b/easybuild/tools/systemtools.py index 1b285e191d..13e28cc6f3 100644 --- a/easybuild/tools/systemtools.py +++ b/easybuild/tools/systemtools.py @@ -59,6 +59,14 @@ _log.debug("Failed to import 'distro' Python module: %s", err) HAVE_DISTRO = False +try: + from archspec.cpu import host as archspec_cpu_host + HAVE_ARCHSPEC = True +except ImportError as err: + _log.debug("Failed to import 'archspec' Python module: %s", err) + HAVE_ARCHSPEC = False + + # Architecture constants AARCH32 = 'AArch32' @@ -344,6 +352,22 @@ def get_cpu_family(): return family 
+def get_cpu_arch_name(): + """ + Determine CPU architecture name via archspec (if available). + """ + cpu_arch_name = None + if HAVE_ARCHSPEC: + res = archspec_cpu_host() + if res: + cpu_arch_name = res.name + + if cpu_arch_name is None: + cpu_arch_name = UNKNOWN + + return cpu_arch_name + + def get_cpu_model(): """ Determine CPU model, e.g., Intel(R) Core(TM) i5-2540M CPU @ 2.60GHz @@ -563,6 +587,7 @@ def get_os_name(): os_name_map = { 'red hat enterprise linux server': 'RHEL', + 'red hat enterprise linux': 'RHEL', # RHEL8 has no server/client 'scientific linux sl': 'SL', 'scientific linux': 'SL', 'suse linux enterprise server': 'SLES', @@ -745,6 +770,7 @@ def get_system_info(): return { 'core_count': get_avail_core_count(), 'total_memory': get_total_memory(), + 'cpu_arch_name': get_cpu_arch_name(), 'cpu_model': get_cpu_model(), 'cpu_speed': get_cpu_speed(), 'cpu_vendor': get_cpu_vendor(), diff --git a/easybuild/tools/testing.py b/easybuild/tools/testing.py index 456e7c0db2..ec7d83ba37 100644 --- a/easybuild/tools/testing.py +++ b/easybuild/tools/testing.py @@ -46,11 +46,11 @@ from easybuild.tools.build_log import EasyBuildError from easybuild.tools.config import build_option from easybuild.tools.filetools import find_easyconfigs, mkdir, read_file, write_file -from easybuild.tools.github import create_gist, post_comment_in_issue +from easybuild.tools.github import GITHUB_EASYCONFIGS_REPO, create_gist, post_comment_in_issue from easybuild.tools.jenkins import aggregate_xml_in_dirs from easybuild.tools.parallelbuild import build_easyconfigs_in_parallel from easybuild.tools.robot import resolve_dependencies -from easybuild.tools.systemtools import get_system_info +from easybuild.tools.systemtools import UNKNOWN, get_system_info from easybuild.tools.version import FRAMEWORK_VERSION, EASYBLOCKS_VERSION @@ -140,7 +140,10 @@ def session_state(): def create_test_report(msg, ecs_with_res, init_session_state, pr_nr=None, gist_log=False): """Create test report for 
easyconfigs PR, in Markdown format.""" - user = build_option('github_user') + + github_user = build_option('github_user') + pr_target_account = build_option('pr_target_account') + pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO end_time = gmtime() @@ -148,7 +151,7 @@ def create_test_report(msg, ecs_with_res, init_session_state, pr_nr=None, gist_l test_report = [] if pr_nr is not None: test_report.extend([ - "Test report for https://github.com/easybuilders/easybuild-easyconfigs/pull/%s" % pr_nr, + "Test report for https://github.com/%s/%s/pull/%s" % (pr_target_account, pr_target_repo, pr_nr), "", ]) test_report.extend([ @@ -182,7 +185,7 @@ def create_test_report(msg, ecs_with_res, init_session_state, pr_nr=None, gist_l if pr_nr is not None: descr += " (PR #%s)" % pr_nr fn = '%s_partial.log' % os.path.basename(ec['spec'])[:-3] - gist_url = create_gist(partial_log_txt, fn, descr=descr, github_user=user) + gist_url = create_gist(partial_log_txt, fn, descr=descr, github_user=github_user) test_log = "(partial log available at %s)" % gist_url build_overview.append(" * **%s** _%s_ %s" % (test_result, os.path.basename(ec['spec']), test_log)) @@ -239,23 +242,32 @@ def upload_test_report_as_gist(test_report, descr=None, fn=None): if fn is None: fn = 'easybuild_test_report_%s.md' % strftime("%Y%M%d-UTC-%H-%M-%S", gmtime()) - user = build_option('github_user') + github_user = build_option('github_user') + gist_url = create_gist(test_report, descr=descr, fn=fn, github_user=github_user) - gist_url = create_gist(test_report, descr=descr, fn=fn, github_user=user) return gist_url def post_easyconfigs_pr_test_report(pr_nr, test_report, msg, init_session_state, success): """Post test report in a gist, and submit comment in easyconfigs PR.""" - user = build_option('github_user') + + github_user = build_option('github_user') + pr_target_account = build_option('pr_target_account') + pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO # 
create gist with test report
-    descr = "EasyBuild test report for easyconfigs PR #%s" % pr_nr
-    fn = 'easybuild_test_report_easyconfigs_pr%s_%s.md' % (pr_nr, strftime("%Y%M%d-UTC-%H-%M-%S", gmtime()))
+    descr = "EasyBuild test report for %s/%s PR #%s" % (pr_target_account, pr_target_repo, pr_nr)
+    timestamp = strftime("%Y%M%d-UTC-%H-%M-%S", gmtime())
+    fn = 'easybuild_test_report_%s_%s_pr%s_%s.md' % (pr_target_account, pr_target_repo, pr_nr, timestamp)
+
     gist_url = upload_test_report_as_gist(test_report, descr=descr, fn=fn)
 
     # post comment to report test result
     system_info = init_session_state['system_info']
+
+    # also mention CPU architecture name, but only if it's known
+    if system_info['cpu_arch_name'] != UNKNOWN:
+        system_info['cpu_model'] += " (%s)" % system_info['cpu_arch_name']
+
     short_system_info = "%(hostname)s - %(os_type)s %(os_name)s %(os_version)s, %(cpu_model)s, Python %(pyver)s" % {
         'cpu_model': system_info['cpu_model'],
         'hostname': system_info['hostname'],
@@ -264,15 +276,17 @@ def post_easyconfigs_pr_test_report(pr_nr, test_report, msg, init_session_state,
         'os_version': system_info['os_version'],
         'pyver': system_info['python_version'].split(' ')[0],
     }
+
     comment_lines = [
-        "Test report by @%s" % user,
+        "Test report by @%s" % github_user,
         ('**FAILED**', '**SUCCESS**')[success],
         msg,
         short_system_info,
         "See %s for a full test report."
% gist_url, ] comment = '\n'.join(comment_lines) - post_comment_in_issue(pr_nr, comment, github_user=user) + + post_comment_in_issue(pr_nr, comment, account=pr_target_account, repo=pr_target_repo, github_user=github_user) msg = "Test report uploaded to %s and mentioned in a comment in easyconfigs PR#%s" % (gist_url, pr_nr) return msg diff --git a/easybuild/tools/toolchain/mpi.py b/easybuild/tools/toolchain/mpi.py index 9a30baa33f..052c3e061d 100644 --- a/easybuild/tools/toolchain/mpi.py +++ b/easybuild/tools/toolchain/mpi.py @@ -166,6 +166,22 @@ def mpi_family(self): else: raise EasyBuildError("mpi_family: MPI_FAMILY is undefined.") + def mpi_cmd_prefix(self, nr_ranks=1): + """Construct an MPI command prefix to precede an executable""" + + # Verify that the command appears at the end of mpi_cmd_for + test_cmd = 'xxx_command_xxx' + mpi_cmd = self.mpi_cmd_for(test_cmd, nr_ranks) + if mpi_cmd.rstrip().endswith(test_cmd): + result = mpi_cmd.replace(test_cmd, '').rstrip() + else: + warning_msg = "mpi_cmd_for cannot be used by mpi_cmd_prefix, " + warning_msg += "requires that %(cmd)s template appears at the end" + self.log.warning(warning_msg) + result = None + + return result + def mpi_cmd_for(self, cmd, nr_ranks): """Construct an MPI command for the given command and number of ranks.""" @@ -180,10 +196,10 @@ def mpi_cmd_for(self, cmd, nr_ranks): self.log.info("Using specified template for MPI commands: %s", mpi_cmd_template) else: # different known mpirun commands - mpirun_n_cmd = "mpirun -n %(nr_ranks)d %(cmd)s" + mpirun_n_cmd = "mpirun -n %(nr_ranks)s %(cmd)s" mpi_cmds = { toolchain.OPENMPI: mpirun_n_cmd, - toolchain.QLOGICMPI: "mpirun -H localhost -np %(nr_ranks)d %(cmd)s", + toolchain.QLOGICMPI: "mpirun -H localhost -np %(nr_ranks)s %(cmd)s", toolchain.INTELMPI: mpirun_n_cmd, toolchain.MVAPICH2: mpirun_n_cmd, toolchain.MPICH: mpirun_n_cmd, @@ -201,7 +217,7 @@ def mpi_cmd_for(self, cmd, nr_ranks): impi_ver = self.get_software_version(self.MPI_MODULE_NAME)[0] if 
LooseVersion(impi_ver) <= LooseVersion('4.1'): - mpi_cmds[toolchain.INTELMPI] = "mpirun %(mpdbf)s %(nodesfile)s -np %(nr_ranks)d %(cmd)s" + mpi_cmds[toolchain.INTELMPI] = "mpirun %(mpdbf)s %(nodesfile)s -np %(nr_ranks)s %(cmd)s" # set temporary dir for MPD # note: this needs to be kept *short*, @@ -230,7 +246,7 @@ def mpi_cmd_for(self, cmd, nr_ranks): # create nodes file nodes = os.path.join(tmpdir, 'nodes') - write_file(nodes, "localhost\n" * nr_ranks) + write_file(nodes, "localhost\n" * int(nr_ranks)) params.update({'nodesfile': "-machinefile %s" % nodes}) @@ -240,9 +256,19 @@ def mpi_cmd_for(self, cmd, nr_ranks): else: raise EasyBuildError("Don't know which template MPI command to use for MPI family '%s'", mpi_family) + missing = [] + for key in sorted(params.keys()): + tmpl = '%(' + key + ')s' + if tmpl not in mpi_cmd_template: + missing.append(tmpl) + if missing: + raise EasyBuildError("Missing templates in mpi-cmd-template value '%s': %s", + mpi_cmd_template, ', '.join(missing)) + try: res = mpi_cmd_template % params except KeyError as err: - raise EasyBuildError("Failed to complete MPI cmd template '%s' with %s: %s", mpi_cmd_template, params, err) + raise EasyBuildError("Failed to complete MPI cmd template '%s' with %s: KeyError %s", + mpi_cmd_template, params, err) return res diff --git a/easybuild/tools/version.py b/easybuild/tools/version.py index 03ea55c176..79824ff42b 100644 --- a/easybuild/tools/version.py +++ b/easybuild/tools/version.py @@ -43,7 +43,7 @@ # recent setuptools versions will *TRANSFORM* something like 'X.Y.Zdev' into 'X.Y.Z.dev0', with a warning like # UserWarning: Normalizing '2.4.0dev' to '2.4.0.dev0' # This causes problems further up the dependency chain... 
-VERSION = LooseVersion('4.1.2') +VERSION = LooseVersion('4.2.0') UNKNOWN = 'UNKNOWN' @@ -55,18 +55,18 @@ def get_git_revision(): relies on GitPython (see http://gitorious.org/git-python) """ try: - import git + from git import Git, GitCommandError except ImportError: return UNKNOWN try: path = os.path.dirname(__file__) - gitrepo = git.Git(path) + gitrepo = Git(path) res = gitrepo.rev_list('HEAD').splitlines()[0] # 'encode' may be required to make sure a regular string is returned rather than a unicode string # (only needed in Python 2; in Python 3, regular strings are already unicode) if not isinstance(res, str): res = res.encode('ascii') - except git.GitCommandError: + except GitCommandError: res = UNKNOWN return res diff --git a/requirements.txt b/requirements.txt index 3a93ac826f..0a8591c50f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -50,3 +50,5 @@ GC3Pie python-graph-dot python-hglib requests + +archspec; python_version >= '2.7' diff --git a/setup.cfg b/setup.cfg index e4bfd7cb81..430d761b59 100644 --- a/setup.cfg +++ b/setup.cfg @@ -19,4 +19,4 @@ builtins = # ignore "Black would make changes" produced by flake8-black # see also https://github.com/houndci/hound/issues/1769 -ignore = BLK100 +extend-ignore = BLK100 diff --git a/test/framework/build_log.py b/test/framework/build_log.py index 7af9e623f1..0c9ec6894b 100644 --- a/test/framework/build_log.py +++ b/test/framework/build_log.py @@ -237,11 +237,11 @@ def test_log_levels(self): def test_print_warning(self): """Test print_warning""" - def run_check(args, silent=False, expected_stderr=''): + def run_check(args, silent=False, expected_stderr='', **kwargs): """Helper function to check stdout/stderr produced via print_warning.""" self.mock_stderr(True) self.mock_stdout(True) - print_warning(*args, silent=silent) + print_warning(*args, silent=silent, **kwargs) stderr = self.get_stderr() stdout = self.get_stdout() self.mock_stdout(False) @@ -258,6 +258,14 @@ def run_check(args, silent=False, 
expected_stderr=''): self.assertErrorRegex(EasyBuildError, "Unknown named arguments", print_warning, 'foo', unknown_arg='bar') + # test passing of logger to print_warning + tmp_logfile = os.path.join(self.test_prefix, 'test.log') + logger, _ = init_logging(tmp_logfile, silent=True) + expected = "\nWARNING: Test log message with a logger involved.\n\n" + run_check(["Test log message with a logger involved."], expected_stderr=expected, log=logger) + log_txt = read_file(tmp_logfile) + self.assertTrue("WARNING Test log message with a logger involved." in log_txt) + def test_print_error(self): """Test print_error""" def run_check(args, silent=False, expected_stderr=''): diff --git a/test/framework/easyblock.py b/test/framework/easyblock.py index 00033c106a..25e9789d14 100644 --- a/test/framework/easyblock.py +++ b/test/framework/easyblock.py @@ -51,7 +51,7 @@ from easybuild.tools.modules import reset_module_caches from easybuild.tools.utilities import time2str from easybuild.tools.version import get_git_revision, this_is_easybuild - +from easybuild.tools.py2vs3 import string_type class EasyBlockTest(EnhancedTestCase): """ Baseclass for easyblock testcases """ @@ -318,11 +318,10 @@ def test_make_module_req(self): os.makedirs(eb.installdir) open(os.path.join(eb.installdir, 'foo.jar'), 'w').write('foo.jar') open(os.path.join(eb.installdir, 'bla.jar'), 'w').write('bla.jar') - os.mkdir(os.path.join(eb.installdir, 'bin')) - os.mkdir(os.path.join(eb.installdir, 'bin', 'testdir')) - os.mkdir(os.path.join(eb.installdir, 'sbin')) - os.mkdir(os.path.join(eb.installdir, 'share')) - os.mkdir(os.path.join(eb.installdir, 'share', 'man')) + for path in ('bin', ('bin', 'testdir'), 'sbin', 'share', ('share', 'man'), 'lib', 'lib64'): + if isinstance(path, string_type): + path = (path, ) + os.mkdir(os.path.join(eb.installdir, *path)) # this is not a path that should be picked up os.mkdir(os.path.join(eb.installdir, 'CPATH')) @@ -332,6 +331,7 @@ def test_make_module_req(self): 
self.assertTrue(re.search(r"^prepend-path\s+CLASSPATH\s+\$root/bla.jar$", guess, re.M)) self.assertTrue(re.search(r"^prepend-path\s+CLASSPATH\s+\$root/foo.jar$", guess, re.M)) self.assertTrue(re.search(r"^prepend-path\s+MANPATH\s+\$root/share/man$", guess, re.M)) + self.assertTrue(re.search(r"^prepend-path\s+CMAKE_PREFIX_PATH\s+\$root$", guess, re.M)) # bin/ is not added to $PATH if it doesn't include files self.assertFalse(re.search(r"^prepend-path\s+PATH\s+\$root/bin$", guess, re.M)) self.assertFalse(re.search(r"^prepend-path\s+PATH\s+\$root/sbin$", guess, re.M)) @@ -341,6 +341,7 @@ def test_make_module_req(self): self.assertTrue(re.search(r'^prepend_path\("CLASSPATH", pathJoin\(root, "bla.jar"\)\)$', guess, re.M)) self.assertTrue(re.search(r'^prepend_path\("CLASSPATH", pathJoin\(root, "foo.jar"\)\)$', guess, re.M)) self.assertTrue(re.search(r'^prepend_path\("MANPATH", pathJoin\(root, "share/man"\)\)$', guess, re.M)) + self.assertTrue('prepend_path("CMAKE_PREFIX_PATH", root)' in guess) # bin/ is not added to $PATH if it doesn't include files self.assertFalse(re.search(r'^prepend_path\("PATH", pathJoin\(root, "bin"\)\)$', guess, re.M)) self.assertFalse(re.search(r'^prepend_path\("PATH", pathJoin\(root, "sbin"\)\)$', guess, re.M)) @@ -361,6 +362,41 @@ def test_make_module_req(self): else: self.assertTrue(False, "Unknown module syntax: %s" % get_module_syntax()) + # Check that lib64 is only added to CMAKE_LIBRARY_PATH if there are files in there + # but only if it is not a symlink to lib + # -- No Files + if get_module_syntax() == 'Tcl': + self.assertFalse(re.search(r"^prepend-path\s+CMAKE_LIBRARY_PATH\s+\$root/lib64$", guess, re.M)) + elif get_module_syntax() == 'Lua': + self.assertFalse('prepend_path("CMAKE_LIBRARY_PATH", pathJoin(root, "lib64"))' in guess) + # -- With files + open(os.path.join(eb.installdir, 'lib64', 'libfoo.so'), 'w').write('test') + guess = eb.make_module_req() + if get_module_syntax() == 'Tcl': + 
self.assertTrue(re.search(r"^prepend-path\s+CMAKE_LIBRARY_PATH\s+\$root/lib64$", guess, re.M)) + elif get_module_syntax() == 'Lua': + self.assertTrue('prepend_path("CMAKE_LIBRARY_PATH", pathJoin(root, "lib64"))' in guess) + # -- With files in lib and lib64 symlinks to lib + open(os.path.join(eb.installdir, 'lib', 'libfoo.so'), 'w').write('test') + shutil.rmtree(os.path.join(eb.installdir, 'lib64')) + os.symlink('lib', os.path.join(eb.installdir, 'lib64')) + guess = eb.make_module_req() + if get_module_syntax() == 'Tcl': + self.assertFalse(re.search(r"^prepend-path\s+CMAKE_LIBRARY_PATH\s+\$root/lib64$", guess, re.M)) + elif get_module_syntax() == 'Lua': + self.assertFalse('prepend_path("CMAKE_LIBRARY_PATH", pathJoin(root, "lib64"))' in guess) + + # With files in /lib and /lib64 symlinked to /lib there should be exactly 1 entry for (LD_)LIBRARY_PATH + # pointing to /lib + for var in ('LIBRARY_PATH', 'LD_LIBRARY_PATH'): + if get_module_syntax() == 'Tcl': + self.assertFalse(re.search(r"^prepend-path\s+%s\s+\$root/lib64$" % var, guess, re.M)) + self.assertEqual(len(re.findall(r"^prepend-path\s+%s\s+\$root/lib$" % var, guess, re.M)), 1) + elif get_module_syntax() == 'Lua': + self.assertFalse(re.search(r'^prepend_path\("%s", pathJoin\(root, "lib64"\)\)$' % var, guess, re.M)) + self.assertEqual(len(re.findall(r'^prepend_path\("%s", pathJoin\(root, "lib"\)\)$' % var, + guess, re.M)), 1) + # check for behavior when a string value is used as dict value by make_module_req_guesses eb.make_module_req_guess = lambda: {'PATH': 'bin'} txt = eb.make_module_req() @@ -786,7 +822,6 @@ def test_extensions_step(self): def test_skip_extensions_step(self): """Test the skip_extensions_step""" - init_config(build_options={'silent': True}) self.contents = cleandoc(""" easyblock = "ConfigureMake" @@ -797,11 +832,12 @@ def test_skip_extensions_step(self): toolchain = SYSTEM exts_list = [ "ext1", - ("ext2", "42", {"source_tmpl": "dummy.tgz"}), + ("EXT-2", "42", {"source_tmpl": "dummy.tgz"}), 
("ext3", "1.1", {"source_tmpl": "dummy.tgz", "modulename": "real_ext"}), + "ext4", ] exts_filter = ("\ - if [ %(ext_name)s == 'ext2' ] && [ %(ext_version)s == '42' ] && [[ %(src)s == *dummy.tgz ]];\ + if [ %(ext_name)s == 'ext_2' ] && [ %(ext_version)s == '42' ] && [[ %(src)s == *dummy.tgz ]];\ then exit 0;\ elif [ %(ext_name)s == 'real_ext' ]; then exit 0;\ else exit 1; fi", "") @@ -813,12 +849,30 @@ def test_skip_extensions_step(self): eb.builddir = config.build_path() eb.installdir = config.install_path() eb.skip = True + + self.mock_stdout(True) eb.extensions_step(fetch=True) - # 'ext1' should be in eb.exts - eb_exts = [y for x in eb.exts for y in x.values()] + stdout = self.get_stdout() + self.mock_stdout(False) + + patterns = [ + r"^== skipping extension EXT-2", + r"^== skipping extension ext3", + r"^== installing extension ext1 \(1/2\)\.\.\.", + r"^== installing extension ext4 \(2/2\)\.\.\.", + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout)) + + # 'ext1' should be in eb.ext_instances + eb_exts = [x.name for x in eb.ext_instances] self.assertTrue('ext1' in eb_exts) - # 'ext2' should not - self.assertFalse('ext2' in eb_exts) + # 'EXT-2' should not + self.assertFalse('EXT-2' in eb_exts) + self.assertFalse('EXT_2' in eb_exts) + self.assertFalse('ext-2' in eb_exts) + self.assertFalse('ext_2' in eb_exts) # 'ext3' should not self.assertFalse('ext3' in eb_exts) @@ -1711,11 +1765,11 @@ def run_checks(): # full check also catches checksum issues with extensions res = eb.check_checksums() - self.assertEqual(len(res), 5) + self.assertEqual(len(res), 4) run_checks() idx = 2 - for ext in ['bar', 'barbar', 'toy']: + for ext in ['bar', 'barbar']: expected = "Checksums missing for one or more sources/patches of extension %s in " % ext self.assertTrue(res[idx].startswith(expected)) idx += 1 diff --git a/test/framework/easyconfig.py b/test/framework/easyconfig.py 
index f956900f59..9332d55708 100644 --- a/test/framework/easyconfig.py +++ b/test/framework/easyconfig.py @@ -64,8 +64,8 @@ from easybuild.tools.config import module_classes from easybuild.tools.configobj import ConfigObj from easybuild.tools.docs import avail_easyconfig_constants, avail_easyconfig_templates -from easybuild.tools.filetools import adjust_permissions, change_dir, copy_file, mkdir, read_file, remove_file -from easybuild.tools.filetools import symlink, write_file +from easybuild.tools.filetools import adjust_permissions, change_dir, copy_file, mkdir, read_file +from easybuild.tools.filetools import remove_dir, remove_file, symlink, write_file from easybuild.tools.module_naming_scheme.toolchain import det_toolchain_compilers, det_toolchain_mpi from easybuild.tools.module_naming_scheme.utilities import det_full_ec_version from easybuild.tools.options import parse_external_modules_metadata @@ -464,7 +464,8 @@ def test_extensions_templates(self): ' "source_tmpl": "%(name)s-%(version_major_minor)s-py%(pymajver)s%(versionsuffix)s.tar.gz",', ' "patches": ["%(name)s-%(version)s_fix-silly-typo-in-printf-statement.patch"],', # use hacky prebuildopts that is picked up by 'EB_Toy' easyblock, to check whether templates are resolved - ' "prebuildopts": "gcc -O2 %(name)s.c -o toy-%(version)s && mv toy-%(version)s toy #",', + ' "prebuildopts": "gcc -O2 %(name)s.c -o toy-%(version)s &&' + + ' mv toy-%(version)s toy # echo installdir is %(installdir)s #",', ' }),', ']', ]) @@ -489,9 +490,12 @@ def test_extensions_templates(self): for patch in toy_ext.patches: patches.append(patch['path']) self.assertEqual(patches, [os.path.join(self.test_prefix, toy_patch_fn)]) + # define actual installation dir + pi_installdir = os.path.join(self.test_installpath, 'software', 'pi', '3.14-test') + expected_prebuildopts = 'gcc -O2 toy.c -o toy-0.0 && mv toy-0.0 toy # echo installdir is %s #' % pi_installdir expected = { 'patches': ['toy-0.0_fix-silly-typo-in-printf-statement.patch'], - 
'prebuildopts': 'gcc -O2 toy.c -o toy-0.0 && mv toy-0.0 toy #', + 'prebuildopts': expected_prebuildopts, 'source_tmpl': 'toy-0.0-py3-test.tar.gz', 'source_urls': ['https://pypi.python.org/packages/source/t/toy'], } @@ -500,10 +504,9 @@ def test_extensions_templates(self): # also .cfg of Extension instance was updated correctly self.assertEqual(toy_ext.cfg['source_urls'], ['https://pypi.python.org/packages/source/t/toy']) self.assertEqual(toy_ext.cfg['patches'], [toy_patch_fn]) - self.assertEqual(toy_ext.cfg['prebuildopts'], "gcc -O2 toy.c -o toy-0.0 && mv toy-0.0 toy #") + self.assertEqual(toy_ext.cfg['prebuildopts'], expected_prebuildopts) # check whether files expected to be installed for 'toy' extension are in place - pi_installdir = os.path.join(self.test_installpath, 'software', 'pi', '3.14-test') self.assertTrue(os.path.exists(os.path.join(pi_installdir, 'bin', 'toy'))) self.assertTrue(os.path.exists(os.path.join(pi_installdir, 'lib', 'libtoy.a'))) @@ -958,12 +961,14 @@ def test_templating(self): ' "dirs": ["libfoo.%%s" %% SHLIB_EXT, "lib/%%(arch)s"],', '}', 'dependencies = [', + ' ("CUDA", "10.1.105"),' ' ("Java", "1.7.80"),' ' ("Perl", "5.22.0"),' ' ("Python", "2.7.10"),' ' ("R", "3.2.3"),' ']', 'modloadmsg = "%s"' % '; '.join([ + 'CUDA: %%(cudaver)s, %%(cudamajver)s, %%(cudashortver)s', 'Java: %%(javaver)s, %%(javamajver)s, %%(javashortver)s', 'Python: %%(pyver)s, %%(pymajver)s, %%(pyshortver)s', 'Perl: %%(perlver)s, %%(perlmajver)s, %%(perlshortver)s', @@ -997,7 +1002,11 @@ def test_templating(self): dirs1 = eb['sanity_check_paths']['dirs'][1] self.assertTrue(lib_arch_regex.match(dirs1), "Pattern '%s' matches '%s'" % (lib_arch_regex.pattern, dirs1)) self.assertEqual(eb['homepage'], "http://example.com/P/p/v3/") - expected = "Java: 1.7.80, 1, 1.7; Python: 2.7.10, 2, 2.7; Perl: 5.22.0, 5, 5.22; R: 3.2.3, 3, 3.2" + expected = ("CUDA: 10.1.105, 10, 10.1; " + "Java: 1.7.80, 1, 1.7; " + "Python: 2.7.10, 2, 2.7; " + "Perl: 5.22.0, 5, 5.22; " + "R: 3.2.3, 3, 
3.2") self.assertEqual(eb['modloadmsg'], expected) self.assertEqual(eb['license_file'], os.path.join(os.environ['HOME'], 'licenses', 'PI', 'license.txt')) @@ -1005,6 +1014,19 @@ def test_templating(self): eb['description'] = "test easyconfig % %% %s% %%% %(name)s %%(name)s %%%(name)s %%%%(name)s" self.assertEqual(eb['description'], "test easyconfig % %% %s% %%% PI %(name)s %PI %%(name)s") + # test use of %(mpi_cmd_prefix)s template + test_ecs_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs', 'test_ecs') + gompi_ec = os.path.join(test_ecs_dir, 't', 'toy', 'toy-0.0-gompi-2018a.eb') + test_ec = os.path.join(self.test_prefix, 'test.eb') + write_file(test_ec, read_file(gompi_ec) + "\nsanity_check_commands = ['%(mpi_cmd_prefix)s toy']") + + ec = EasyConfig(test_ec) + self.assertEqual(ec['sanity_check_commands'], ['mpirun -n 1 toy']) + + init_config(build_options={'mpi_cmd_template': "mpiexec -np %(nr_ranks)s -- %(cmd)s "}) + ec = EasyConfig(test_ec) + self.assertEqual(ec['sanity_check_commands'], ['mpiexec -np 1 -- toy']) + def test_templating_doc(self): """test templating documentation""" doc = avail_easyconfig_templates() @@ -1429,14 +1451,114 @@ def test_external_dependencies(self): deps = ec.dependencies() self.assertEqual(len(deps), 7) - correct_deps = ['somebuilddep/0.1', 'intel/2018a', 'GCC/6.4.0-2.28', 'foobar/1.2.3', 'test/9.7.5', 'pi/3.14', - 'hidden/.1.2.3'] + correct_deps = ['somebuilddep/0.1', 'intel/2018a', 'GCC/6.4.0-2.28', 'foobar/1.2.3', + 'test/9.7.5', 'pi/3.14', 'hidden/.1.2.3'] self.assertEqual([d['short_mod_name'] for d in deps], correct_deps) self.assertEqual([d['full_mod_name'] for d in deps], correct_deps) self.assertEqual([d['external_module'] for d in deps], [True, False, True, True, True, True, True]) self.assertEqual([d['hidden'] for d in deps], [False, False, False, False, False, False, True]) + # no metadata available for deps + expected = [{}] * len(deps) + self.assertEqual([d['external_module_metadata'] for d in 
deps], expected) + + # test probing done by handle_external_module_metadata via probe_external_module_metadata, + # by adding a couple of matching module files with some useful data in them + # (use Tcl syntax, so it works with all varieties of module tools) + mod_dir = os.path.join(self.test_prefix, 'modules') + self.modtool.use(mod_dir) + + pi_mod_txt = '\n'.join([ + "#%Module", + "setenv PI_ROOT /software/pi/3.14", + "setenv PI_VERSION 3.14", + ]) + write_file(os.path.join(mod_dir, 'pi/3.14'), pi_mod_txt) + + # foobar module with different version than the one used as an external dep; + # will still be used for probing (as a fallback) + foobar_mod_txt = '\n'.join([ + "#%Module", + "setenv CRAY_FOOBAR_DIR /software/foobar/2.3.4", + "setenv CRAY_FOOBAR_VERSION 2.3.4", + ]) + write_file(os.path.join(mod_dir, 'foobar/2.3.4'), foobar_mod_txt) + + ec = EasyConfig(toy_ec) + deps = ec.dependencies() + + self.assertEqual(len(deps), 7) + + for idx in [0, 1, 2, 4, 6]: + self.assertEqual(deps[idx]['external_module_metadata'], {}) + + self.assertEqual(deps[3]['full_mod_name'], 'foobar/1.2.3') + foobar_metadata = { + 'name': ['foobar'], + 'prefix': '/software/foobar/2.3.4', + 'version': ['2.3.4'], + } + self.assertEqual(deps[3]['external_module_metadata'], foobar_metadata) + self.assertEqual(deps[5]['full_mod_name'], 'pi/3.14') + pi_metadata = { + 'name': ['pi'], + 'prefix': '/software/pi/3.14', + 'version': ['3.14'], + } + self.assertEqual(deps[5]['external_module_metadata'], pi_metadata) + + # provide file with partial metadata for some external modules; + # metadata obtained from probing modules should be added to it... 
metadata = os.path.join(self.test_prefix, 'external_modules_metadata.cfg') + metadatatxt = '\n'.join([ + '[pi/3.14]', + 'name = PI', + 'version = 3.14.0', + '[foobar]', + 'version = 1.0', + '[foobar/1.2.3]', + 'version = 1.2.3', + '[test]', + 'name = TEST', + ]) + write_file(metadata, metadatatxt) + build_options = { + 'external_modules_metadata': parse_external_modules_metadata([metadata]), + 'valid_module_classes': module_classes(), + } + init_config(build_options=build_options) + ec = EasyConfig(toy_ec) + deps = ec.dependencies() + + self.assertEqual(len(deps), 7) + + for idx in [0, 1, 2, 6]: + self.assertEqual(deps[idx]['external_module_metadata'], {}) + + self.assertEqual(deps[3]['full_mod_name'], 'foobar/1.2.3') + foobar_metadata = { + 'name': ['foobar'], # probed from 'foobar' module + 'prefix': '/software/foobar/2.3.4', # probed from 'foobar' module + 'version': ['1.2.3'], # from [foobar/1.2.3] entry in metadata file + } + self.assertEqual(deps[3]['external_module_metadata'], foobar_metadata) + + self.assertEqual(deps[4]['full_mod_name'], 'test/9.7.5') + self.assertEqual(deps[4]['external_module_metadata'], { + # from [test] entry in metadata file + 'name': ['TEST'], + }) + + self.assertEqual(deps[5]['full_mod_name'], 'pi/3.14') + pi_metadata = { + 'name': ['PI'], # from [pi/3.14] entry in metadata file + 'prefix': '/software/pi/3.14', # probed from 'pi/3.14' module + 'version': ['3.14.0'], # from [pi/3.14] entry in metadata file + } + self.assertEqual(deps[5]['external_module_metadata'], pi_metadata) + + # provide file with full metadata for external modules; + # this data wins over probed metadata from modules (for backwards compatibility) metadatatxt = '\n'.join([ '[pi/3.14]', 'name = PI', @@ -1485,6 +1607,10 @@ def test_external_dependencies(self): } self.assertEqual(ec.dependencies()[5]['external_module_metadata'], metadata) + # get rid of modules first + self.modtool.unuse(mod_dir) + remove_dir(mod_dir) + # check whether $EBROOT*/$EBVERSION* 
environment variables are defined correctly for external modules os.environ['PI_PREFIX'] = '/test/prefix/PI' os.environ['TEST_INC'] = '/test/prefix/test/include' @@ -1502,6 +1628,57 @@ def test_external_dependencies(self): self.assertEqual(os.environ.get('EBVERSIONPI'), '3.14') self.assertEqual(os.environ.get('EBVERSIONTEST'), '9.7.5') + def test_external_dependencies_templates(self): + """Test use of templates for dependencies marked as external modules.""" + + topdir = os.path.dirname(os.path.abspath(__file__)) + toy_ec = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb') + toy_ectxt = read_file(toy_ec) + + extra_ectxt = '\n'.join([ + "versionsuffix = '-Python-%(pyver)s-Perl-%(perlshortver)s'", + '', + "dependencies = [", + " ('cray-python/3.6.5.7', EXTERNAL_MODULE),", + " ('perl/5.30.0-1', EXTERNAL_MODULE),", + "]", + ]) + test_ec = os.path.join(self.test_prefix, 'test.eb') + write_file(test_ec, toy_ectxt + '\n' + extra_ectxt) + + # put metadata in place so templates can be defined + metadata = os.path.join(self.test_prefix, 'external_modules_metadata.cfg') + metadatatxt = '\n'.join([ + '[cray-python]', + 'name = Python', + '[cray-python/3.6.5.7]', + 'version = 3.6.5', + '[perl/5.30.0-1]', + 'name = Perl', + 'version = 5.30.0', + ]) + write_file(metadata, metadatatxt) + build_options = { + 'external_modules_metadata': parse_external_modules_metadata([metadata]), + 'valid_module_classes': module_classes(), + } + init_config(build_options=build_options) + + ec = EasyConfig(test_ec) + + expected_template_values = { + 'perlmajver': '5', + 'perlshortver': '5.30', + 'perlver': '5.30.0', + 'pymajver': '3', + 'pyshortver': '3.6', + 'pyver': '3.6.5', + } + for key in expected_template_values: + self.assertEqual(ec.template_values[key], expected_template_values[key]) + + self.assertEqual(ec['versionsuffix'], '-Python-3.6.5-Perl-5.30') + def test_update(self): """Test use of update() method for EasyConfig instances.""" topdir = 
os.path.abspath(os.path.dirname(__file__)) @@ -1515,21 +1692,26 @@ def test_update(self): ec.update('description', "- just a test") self.assertEqual(ec['description'].strip(), "Toy C program, 100% toy. - just a test") - # spaces in between multiple updates for stirng values + # spaces in between multiple updates for string values ec.update('configopts', 'CC="$CC"') ec.update('configopts', 'CXX="$CXX"') self.assertTrue(ec['configopts'].strip().endswith('CC="$CC" CXX="$CXX"')) + # spaces in between multiple updates for string values from list + ec.update('configopts', ['MORE_VALUE', 'EVEN_MORE']) + self.assertTrue(ec['configopts'].strip().endswith('MORE_VALUE EVEN_MORE')) # for list values: extend ec.update('patches', ['foo.patch', 'bar.patch']) toy_patch_fn = 'toy-0.0_fix-silly-typo-in-printf-statement.patch' self.assertEqual(ec['patches'], [toy_patch_fn, ('toy-extra.txt', 'toy-0.0'), 'foo.patch', 'bar.patch']) - # for unallowed duplicates + # for unallowed duplicates on string values ec.update('configopts', 'SOME_VALUE') configopts_tmp = ec['configopts'] ec.update('configopts', 'SOME_VALUE', allow_duplicate=False) self.assertEqual(ec['configopts'], configopts_tmp) + ec.update('configopts', ['CC="$CC"', 'SOME_VALUE'], allow_duplicate=False) + self.assertEqual(ec['configopts'], configopts_tmp) # for unallowed duplicates when a list is used ec.update('patches', ['foo2.patch', 'bar2.patch']) @@ -1634,6 +1816,59 @@ def test_dump(self): if param in ec: self.assertEqual(ec[param], dumped_ec[param]) + def test_toolchain_hierarchy_aware_dump(self): + """Test that EasyConfig's dump() method is aware of the toolchain hierarchy.""" + test_ecs_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs') + build_options = { + 'check_osdeps': False, + 'robot_path': [test_ecs_dir], + 'valid_module_classes': module_classes(), + } + init_config(build_options=build_options) + rawtxt = '\n'.join([ + "easyblock = 'EB_foo'", + '', + "name = 'foo'", + 
"version = '0.0.1'", + '', + "toolchain = {'name': 'foss', 'version': '2018a'}", + '', + "homepage = 'http://foo.com/'", + 'description = "foo description"', + '', + 'sources = [SOURCE_TAR_GZ]', + 'source_urls = ["http://example.com"]', + 'checksums = ["6af6ab95ce131c2dd467d2ebc8270e9c265cc32496210b069e51d3749f335f3d"]', + '', + "dependencies = [", + " ('toy', '0.0', '', ('gompi', '2018a')),", + " ('bar', '1.0'),", + " ('foobar/1.2.3', EXTERNAL_MODULE),", + "]", + '', + "foo_extra1 = 'foobar'", + '', + 'moduleclass = "tools"', + ]) + + test_ec = os.path.join(self.test_prefix, 'test.eb') + ec = EasyConfig(None, rawtxt=rawtxt) + ecdict = ec.asdict() + ec.dump(test_ec) + # dict representation of EasyConfig instance should not change after dump + self.assertEqual(ecdict, ec.asdict()) + ectxt = read_file(test_ec) + dumped_ec = EasyConfig(test_ec) + self.assertEqual(ecdict, dumped_ec.asdict()) + self.assertTrue(r"'toy', '0.0')," in ectxt) + # test case where we ask for explicit toolchains + ec.dump(test_ec, explicit_toolchains=True) + self.assertEqual(ecdict, ec.asdict()) + ectxt = read_file(test_ec) + dumped_ec = EasyConfig(test_ec) + self.assertEqual(ecdict, dumped_ec.asdict()) + self.assertTrue(r"'toy', '0.0', '', ('gompi', '2018a'))," in ectxt) + def test_dump_order(self): """Test order of easyconfig parameters in dumped easyconfig.""" rawtxt = '\n'.join([ @@ -2713,17 +2948,26 @@ def test_hidden_toolchain(self): def test_categorize_files_by_type(self): """Test categorize_files_by_type""" - self.assertEqual({'easyconfigs': [], 'files_to_delete': [], 'patch_files': []}, categorize_files_by_type([])) + self.assertEqual({'easyconfigs': [], 'files_to_delete': [], 'patch_files': [], 'py_files': []}, + categorize_files_by_type([])) - test_ecs_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs',) + test_dir = os.path.dirname(os.path.abspath(__file__)) + test_ecs_dir = os.path.join(test_dir, 'easyconfigs') toy_patch_fn = 
'toy-0.0_fix-silly-typo-in-printf-statement.patch' toy_patch = os.path.join(os.path.dirname(test_ecs_dir), 'sandbox', 'sources', 'toy', toy_patch_fn) + + easyblocks_dir = os.path.join(test_dir, 'sandbox', 'easybuild', 'easyblocks') + configuremake = os.path.join(easyblocks_dir, 'generic', 'configuremake.py') + toy_easyblock = os.path.join(easyblocks_dir, 't', 'toy.py') + paths = [ 'bzip2-1.0.6.eb', + toy_easyblock, os.path.join(test_ecs_dir, 'test_ecs', 'g', 'gzip', 'gzip-1.4.eb'), toy_patch, 'foo', ':toy-0.0-deps.eb', + configuremake, ] res = categorize_files_by_type(paths) expected = [ @@ -2734,6 +2978,7 @@ def test_categorize_files_by_type(self): self.assertEqual(res['easyconfigs'], expected) self.assertEqual(res['files_to_delete'], ['toy-0.0-deps.eb']) self.assertEqual(res['patch_files'], [toy_patch]) + self.assertEqual(res['py_files'], [toy_easyblock, configuremake]) def test_resolve_template(self): """Test resolve_template function.""" @@ -2968,12 +3213,19 @@ def test_get_paths_for(self): def test_is_generic_easyblock(self): """Test for is_generic_easyblock function.""" + # is_generic_easyblock in easyconfig.py is deprecated, moved to filetools.py + self.allow_deprecated_behaviour() + + self.mock_stderr(True) + for name in ['Binary', 'ConfigureMake', 'CMakeMake', 'PythonPackage', 'JAR']: self.assertTrue(is_generic_easyblock(name)) for name in ['EB_bzip2', 'EB_DL_underscore_POLY_underscore_Classic', 'EB_GCC', 'EB_WRF_minus_Fire']: self.assertFalse(is_generic_easyblock(name)) + self.mock_stderr(False) + def test_get_module_path(self): """Test get_module_path function.""" self.assertEqual(get_module_path('EB_bzip2', generic=False), 'easybuild.easyblocks.bzip2') diff --git a/test/framework/easyconfigs/test_ecs/g/gzip/gzip-1.4-GCC-4.9.3-2.26.eb b/test/framework/easyconfigs/test_ecs/g/gzip/gzip-1.4-GCC-4.9.3-2.26.eb new file mode 100644 index 0000000000..406f4e3342 --- /dev/null +++ b/test/framework/easyconfigs/test_ecs/g/gzip/gzip-1.4-GCC-4.9.3-2.26.eb @@ -0,0 
+1,40 @@ +## +# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild +# +# Copyright:: Copyright (c) 2012-2013 Cyprus Institute / CaSToRC +# Authors:: Thekla Loizou +# License:: MIT/GPL +# $Id$ +# +# This work implements a part of the HPCBIOS project and is a component of the policy: +# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_06-19.html +## +easyblock = 'ConfigureMake' + +name = 'gzip' +version = '1.4' + +homepage = "http://www.gzip.org/" +description = "gzip (GNU zip) is a popular data compression program as a replacement for compress" + +# test toolchain specification +toolchain = {'name': 'GCC', 'version': '4.9.3-2.26'} + +# source tarball filename +sources = ['%(name)s-%(version)s.tar.gz'] + +# download location for source files +source_urls = [GNU_SOURCE] + +# make sure the gzip and gunzip binaries are available after installation +sanity_check_paths = { + 'files': ["bin/gunzip", "bin/gzip"], + 'dirs': [], +} + +# run 'gzip -h' and 'gzip --version' after installation +sanity_check_commands = [True, ('gzip', '--version')] + +software_license = GPLv3 + +moduleclass = 'tools' diff --git a/test/framework/easyconfigs/test_ecs/g/gzip/gzip-1.6-iccifort-2016.1.150-GCC-4.9.3-2.25.eb b/test/framework/easyconfigs/test_ecs/g/gzip/gzip-1.6-iccifort-2016.1.150-GCC-4.9.3-2.25.eb new file mode 100644 index 0000000000..974fbad0c0 --- /dev/null +++ b/test/framework/easyconfigs/test_ecs/g/gzip/gzip-1.6-iccifort-2016.1.150-GCC-4.9.3-2.25.eb @@ -0,0 +1,40 @@ +## +# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild +# +# Copyright:: Copyright (c) 2012-2013 Cyprus Institute / CaSToRC +# Authors:: Thekla Loizou +# License:: MIT/GPL +# $Id$ +# +# This work implements a part of the HPCBIOS project and is a component of the policy: +# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_06-19.html +## +easyblock = 'ConfigureMake' + +name = 'gzip' +version = '1.6' + +homepage = "http://www.gzip.org/" +description = "gzip 
(GNU zip) is a popular data compression program as a replacement for compress" + +# test toolchain specification +toolchain = {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'} + +# source tarball filename +sources = ['%(name)s-%(version)s.tar.gz'] + +# download location for source files +source_urls = [GNU_SOURCE] + +# make sure the gzip and gunzip binaries are available after installation +sanity_check_paths = { + 'files': ["bin/gunzip", "bin/gzip"], + 'dirs': [], +} + +# run 'gzip -h' and 'gzip --version' after installation +sanity_check_commands = [True, ('gzip', '--version')] + +software_license = GPLv3 + +moduleclass = 'tools' diff --git a/test/framework/easyconfigs/test_ecs/h/hwloc/hwloc-1.6.2-GCC-4.9.3-2.26.eb b/test/framework/easyconfigs/test_ecs/h/hwloc/hwloc-1.6.2-GCC-4.9.3-2.26.eb index c445c7d4cb..86e583ee1e 100644 --- a/test/framework/easyconfigs/test_ecs/h/hwloc/hwloc-1.6.2-GCC-4.9.3-2.26.eb +++ b/test/framework/easyconfigs/test_ecs/h/hwloc/hwloc-1.6.2-GCC-4.9.3-2.26.eb @@ -15,7 +15,11 @@ toolchain = {'name': 'GCC', 'version': '4.9.3-2.26'} source_urls = ['http://www.open-mpi.org/software/hwloc/v%(version_major_minor)s/downloads/'] sources = [SOURCE_TAR_GZ] +checksums = ['aa9d9ca75c7d7164f6bf3a52ecd77340eec02c18'] builddependencies = [('binutils', '2.26')] +# introduce fake dependency for testing dep upgrades +dependencies = [('gzip', '1.4')] + moduleclass = 'system' diff --git a/test/framework/easyconfigs/test_ecs/t/toy/toy-0.0-gompi-2018a-test.eb b/test/framework/easyconfigs/test_ecs/t/toy/toy-0.0-gompi-2018a-test.eb index 06d1bc2233..497535850c 100644 --- a/test/framework/easyconfigs/test_ecs/t/toy/toy-0.0-gompi-2018a-test.eb +++ b/test/framework/easyconfigs/test_ecs/t/toy/toy-0.0-gompi-2018a-test.eb @@ -47,6 +47,7 @@ exts_list = [ (name, version, { 'sanity_check_paths': {'files': ['lib/libtoy.a'], 'dirs': []}, 'exts_filter': ("ls -l lib/libtoy.a", ''), + 'checksums': ['44332000aa33b99ad1e00cbd1a7da769220d74647060a10e807b916d73ea27bc'] 
}), ] diff --git a/test/framework/filetools.py b/test/framework/filetools.py index 640176ee33..6595a7884d 100644 --- a/test/framework/filetools.py +++ b/test/framework/filetools.py @@ -38,6 +38,7 @@ import stat import sys import tempfile +import time from test.framework.utilities import EnhancedTestCase, TestLoaderFiltered, init_config from unittest import TextTestRunner @@ -268,7 +269,7 @@ def test_checksums(self): # checksum of length 32 is assumed to be MD5, length 64 to be SHA256, other lengths not allowed # checksum of length other than 32/64 yields an error - error_pattern = "Length of checksum '.*' \(\d+\) does not match with either MD5 \(32\) or SHA256 \(64\)" + error_pattern = r"Length of checksum '.*' \(\d+\) does not match with either MD5 \(32\) or SHA256 \(64\)" for checksum in ['tooshort', 'inbetween32and64charactersisnotgoodeither', known_checksums['sha256'] + 'foo']: self.assertErrorRegex(EasyBuildError, error_pattern, ft.verify_checksum, fp, checksum) @@ -584,7 +585,7 @@ def test_read_write_file(self): txt2 = '\n'.join(['test', '123']) ft.write_file(fp, txt2, append=True) - self.assertEqual(ft.read_file(fp), txt+txt2) + self.assertEqual(ft.read_file(fp), txt + txt2) # test backing up of existing file ft.write_file(fp, 'foo', backup=True) @@ -656,6 +657,16 @@ def test_read_write_file(self): # test use of 'mode' in read_file self.assertEqual(ft.read_file(foo, mode='rb'), b'bar') + def test_is_binary(self): + """Test is_binary function.""" + + for test in ['foo', '', b'foo', b'', "This is just a test", b"This is just a test", b"\xa0"]: + self.assertFalse(ft.is_binary(test)) + + self.assertTrue(ft.is_binary(b'\00')) + self.assertTrue(ft.is_binary(b"File is binary when it includes \00 somewhere")) + self.assertTrue(ft.is_binary(ft.read_file('/bin/ls', mode='rb'))) + def test_det_patched_files(self): """Test det_patched_files function.""" toy_patch_fn = 'toy-0.0_fix-silly-typo-in-printf-statement.patch' @@ -663,6 +674,18 @@ def 
test_det_patched_files(self): self.assertEqual(ft.det_patched_files(pf), ['b/toy-0.0/toy.source']) self.assertEqual(ft.det_patched_files(pf, omit_ab_prefix=True), ['toy-0.0/toy.source']) + # create a patch file with a non-UTF8 character in it, should not result in problems + # (see https://github.com/easybuilders/easybuild-framework/issues/3190) + test_patch = os.path.join(self.test_prefix, 'test.patch') + patch_txt = b'\n'.join([ + b"--- foo", + b"+++ foo", + b"- test line", + b"+ test line with non-UTF8 char: '\xa0'", + ]) + ft.write_file(test_patch, patch_txt) + self.assertEqual(ft.det_patched_files(test_patch), ['foo']) + def test_guess_patch_level(self): "Test guess_patch_level.""" # create dummy toy.source file so guess_patch_level can work @@ -1448,6 +1471,27 @@ def test_copy_dir(self): ft.mkdir(testdir) self.assertErrorRegex(EasyBuildError, "Target location .* already exists", ft.copy_dir, to_copy, testdir) + # if the directory already exists and 'dirs_exist_ok' is True, copy_dir should succeed + ft.copy_dir(to_copy, testdir, dirs_exist_ok=True) + self.assertTrue(sorted(os.listdir(to_copy)) == sorted(os.listdir(testdir))) + + # if the directory already exists and 'dirs_exist_ok' is True and there is another named argument (ignore) + # we expect clean error on Python < 3.8 and pass the test on Python >= 3.8 + # NOTE: reused ignore from previous test + def ignore_func(_, names): + return [x for x in names if '6.4.0-2.28' in x] + + shutil.rmtree(testdir) + ft.mkdir(testdir) + if sys.version_info >= (3, 8): + ft.copy_dir(to_copy, testdir, dirs_exist_ok=True, ignore=ignore_func) + self.assertEqual(sorted(os.listdir(testdir)), expected) + self.assertFalse(os.path.exists(os.path.join(testdir, 'GCC-6.4.0-2.28.eb'))) + else: + error_pattern = "Unknown named arguments passed to copy_dir with dirs_exist_ok=True: ignore" + self.assertErrorRegex(EasyBuildError, error_pattern, ft.copy_dir, to_copy, testdir, + dirs_exist_ok=True, ignore=ignore_func) + # also test 
behaviour of copy_file under --dry-run build_options = { 'extended_dry_run': True, @@ -1652,6 +1696,129 @@ def test_remove(self): ft.adjust_permissions(self.test_prefix, stat.S_IWUSR, add=True) + def test_index_functions(self): + """Test *_index functions.""" + + test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs') + + # create_index checks whether specified path is an existing directory + doesnotexist = os.path.join(self.test_prefix, 'doesnotexist') + self.assertErrorRegex(EasyBuildError, "Specified path does not exist", ft.create_index, doesnotexist) + + toy_ec = os.path.join(test_ecs, 't', 'toy', 'toy-0.0.eb') + self.assertErrorRegex(EasyBuildError, "Specified path is not a directory", ft.create_index, toy_ec) + + # load_index just returns None if there is no index in specified directory + self.assertEqual(ft.load_index(self.test_prefix), None) + + # create index for test easyconfigs; + # test with specified path with and without trailing '/'s + for path in [test_ecs, test_ecs + '/', test_ecs + '//']: + index = ft.create_index(path) + self.assertEqual(len(index), 81) + + expected = [ + os.path.join('b', 'bzip2', 'bzip2-1.0.6-GCC-4.9.2.eb'), + os.path.join('t', 'toy', 'toy-0.0.eb'), + os.path.join('s', 'ScaLAPACK', 'ScaLAPACK-2.0.2-gompi-2018a-OpenBLAS-0.2.20.eb'), + ] + for fn in expected: + self.assertTrue(fn in index) + + for fp in index: + self.assertTrue(fp.endswith('.eb')) + + # set up some files to create actual index file for + ft.copy_dir(os.path.join(test_ecs, 'g'), os.path.join(self.test_prefix, 'g')) + + # test dump_index function + index_fp = ft.dump_index(self.test_prefix) + self.assertTrue(os.path.exists(index_fp)) + self.assertTrue(os.path.samefile(self.test_prefix, os.path.dirname(index_fp))) + + datestamp_pattern = r"[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]+" + expected_header = [ + "# created at: " + datestamp_pattern, + "# valid until: " + datestamp_pattern, + ] + expected = [ + 
os.path.join('g', 'gzip', 'gzip-1.4.eb'), + os.path.join('g', 'GCC', 'GCC-7.3.0-2.30.eb'), + os.path.join('g', 'gompic', 'gompic-2018a.eb'), + ] + index_txt = ft.read_file(index_fp) + for fn in expected_header + expected: + regex = re.compile('^%s$' % fn, re.M) + self.assertTrue(regex.search(index_txt), "Pattern '%s' found in: %s" % (regex.pattern, index_txt)) + + # test load_index function + self.mock_stderr(True) + self.mock_stdout(True) + index = ft.load_index(self.test_prefix) + stderr = self.get_stderr() + stdout = self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + + self.assertFalse(stderr) + regex = re.compile(r"^== found valid index for %s, so using it\.\.\.$" % self.test_prefix) + self.assertTrue(regex.match(stdout.strip()), "Pattern '%s' matches with: %s" % (regex.pattern, stdout)) + + self.assertEqual(len(index), 26) + for fn in expected: + self.assertTrue(fn in index, "%s should be found in %s" % (fn, sorted(index))) + + # dump_index will not overwrite existing index without force + error_pattern = "File exists, not overwriting it without --force" + self.assertErrorRegex(EasyBuildError, error_pattern, ft.dump_index, self.test_prefix) + + ft.remove_file(index_fp) + + # test creating index file that's infinitely valid + index_fp = ft.dump_index(self.test_prefix, max_age_sec=0) + index_txt = ft.read_file(index_fp) + expected_header[1] = r"# valid until: 9999-12-31 23:59:59\.9+" + for fn in expected_header + expected: + regex = re.compile('^%s$' % fn, re.M) + self.assertTrue(regex.search(index_txt), "Pattern '%s' found in: %s" % (regex.pattern, index_txt)) + + self.mock_stderr(True) + self.mock_stdout(True) + index = ft.load_index(self.test_prefix) + stderr = self.get_stderr() + stdout = self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + + self.assertFalse(stderr) + regex = re.compile(r"^== found valid index for %s, so using it\.\.\.$" % self.test_prefix) + self.assertTrue(regex.match(stdout.strip()), "Pattern '%s' 
matches with: %s" % (regex.pattern, stdout)) + + self.assertEqual(len(index), 26) + for fn in expected: + self.assertTrue(fn in index, "%s should be found in %s" % (fn, sorted(index))) + + ft.remove_file(index_fp) + + # test creating index file that's only valid for a (very) short amount of time + index_fp = ft.dump_index(self.test_prefix, max_age_sec=1) + time.sleep(3) + self.mock_stderr(True) + self.mock_stdout(True) + index = ft.load_index(self.test_prefix) + stderr = self.get_stderr() + stdout = self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + self.assertTrue(index is None) + self.assertFalse(stdout) + regex = re.compile(r"WARNING: Index for %s is no longer valid \(too old\), so ignoring it" % self.test_prefix) + self.assertTrue(regex.search(stderr), "Pattern '%s' found in: %s" % (regex.pattern, stderr)) + + # check whether load_index takes into account --ignore-index + init_config(build_options={'ignore_index': True}) + self.assertEqual(ft.load_index(self.test_prefix), None) + def test_search_file(self): """Test search_file function.""" test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs') @@ -1667,6 +1834,15 @@ def test_search_file(self): self.assertTrue(hits[3].endswith('/hwloc-1.6.2-GCC-4.9.3-2.26.eb')) self.assertTrue(hits[4].endswith('/hwloc-1.8-gcccuda-2018a.eb')) + # also test case-sensitive searching + var_defs, hits_bis = ft.search_file([test_ecs], 'HWLOC', silent=True, case_sensitive=True) + self.assertEqual(var_defs, []) + self.assertEqual(hits_bis, []) + + var_defs, hits_bis = ft.search_file([test_ecs], 'hwloc', silent=True, case_sensitive=True) + self.assertEqual(var_defs, []) + self.assertEqual(hits_bis, hits) + # check filename-only mode var_defs, hits = ft.search_file([test_ecs], 'HWLOC', silent=True, filename_only=True) self.assertEqual(var_defs, []) @@ -1800,7 +1976,7 @@ def test_move_file(self): self.mock_stderr(False) # informative message printed, but file was not actually 
moved - regex = re.compile("^moved file .*/test\.txt to .*/new_test\.txt$") + regex = re.compile(r"^moved file .*/test\.txt to .*/new_test\.txt$") self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout)) self.assertEqual(stderr, '') @@ -1863,7 +2039,7 @@ def test_diff_files(self): ]) res = ft.diff_files(foo, bar) self.assertTrue(res.endswith(expected), "%s ends with %s" % (res, expected)) - regex = re.compile('^--- .*/foo\s*\n\+\+\+ .*/bar\s*$', re.M) + regex = re.compile(r'^--- .*/foo\s*\n\+\+\+ .*/bar\s*$', re.M) self.assertTrue(regex.search(res), "Pattern '%s' found in: %s" % (regex.pattern, res)) def test_get_source_tarball_from_git(self): @@ -1871,7 +2047,7 @@ def test_get_source_tarball_from_git(self): git_config = { 'repo_name': 'testrepository', - 'url': 'https://github.com/hpcugent', + 'url': 'https://github.com/easybuilders', 'tag': 'master', } target_dir = os.path.join(self.test_prefix, 'target') @@ -1896,7 +2072,7 @@ def test_get_source_tarball_from_git(self): git_config = { 'repo_name': 'testrepository', - 'url': 'git@github.com:hpcugent', + 'url': 'git@github.com:easybuilders', 'tag': 'master', } args = ['test.tar.gz', self.test_prefix, git_config] @@ -1950,46 +2126,56 @@ def run_check(): git_config = { 'repo_name': 'testrepository', - 'url': 'git@github.com:hpcugent', + 'url': 'git@github.com:easybuilders', 'tag': 'master', } expected = '\n'.join([ - ' running command "git clone --branch master git@github.com:hpcugent/testrepository.git"', - " \(in .*/tmp.*\)", - ' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"', - " \(in .*/tmp.*\)", + r' running command "git clone --branch master git@github.com:easybuilders/testrepository.git"', + r" \(in .*/tmp.*\)", + r' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"', + r" \(in .*/tmp.*\)", ]) run_check() git_config['recursive'] = True expected = '\n'.join([ - ' running command "git clone --branch master 
--recursive git@github.com:hpcugent/testrepository.git"', - " \(in .*/tmp.*\)", - ' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"', - " \(in .*/tmp.*\)", + r' running command "git clone --branch master --recursive git@github.com:easybuilders/testrepository.git"', + r" \(in .*/tmp.*\)", + r' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"', + r" \(in .*/tmp.*\)", ]) run_check() + git_config['keep_git_dir'] = True + expected = '\n'.join([ + r' running command "git clone --branch master --recursive git@github.com:easybuilders/testrepository.git"', + r" \(in .*/tmp.*\)", + r' running command "tar cfvz .*/target/test.tar.gz testrepository"', + r" \(in .*/tmp.*\)", + ]) + run_check() + del git_config['keep_git_dir'] + del git_config['tag'] git_config['commit'] = '8456f86' expected = '\n'.join([ - ' running command "git clone --recursive git@github.com:hpcugent/testrepository.git"', - " \(in .*/tmp.*\)", - ' running command "git checkout 8456f86 && git submodule update"', - " \(in testrepository\)", - ' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"', - " \(in .*/tmp.*\)", + r' running command "git clone --recursive git@github.com:easybuilders/testrepository.git"', + r" \(in .*/tmp.*\)", + r' running command "git checkout 8456f86 && git submodule update"', + r" \(in testrepository\)", + r' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"', + r" \(in .*/tmp.*\)", ]) run_check() del git_config['recursive'] expected = '\n'.join([ - ' running command "git clone git@github.com:hpcugent/testrepository.git"', - " \(in .*/tmp.*\)", - ' running command "git checkout 8456f86"', - " \(in testrepository\)", - ' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"', - " \(in .*/tmp.*\)", + r' running command "git clone git@github.com:easybuilders/testrepository.git"', + r" \(in .*/tmp.*\)", + r' running command "git checkout 8456f86"', 
+ r" \(in testrepository\)", + r' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"', + r" \(in .*/tmp.*\)", ]) run_check() @@ -2004,7 +2190,7 @@ def test_is_sha256_checksum(self): True, 12345, '', - (a_sha256_checksum, ), + (a_sha256_checksum,), [], ]: self.assertFalse(ft.is_sha256_checksum(not_a_sha256_checksum)) @@ -2065,6 +2251,153 @@ def test_fake_vsc(self): from test_fake_vsc import pkgutil self.assertTrue(pkgutil.__file__.endswith('/test_fake_vsc/pkgutil.py')) + def test_is_generic_easyblock(self): + """Test for is_generic_easyblock function.""" + + for name in ['Binary', 'ConfigureMake', 'CMakeMake', 'PythonPackage', 'JAR']: + self.assertTrue(ft.is_generic_easyblock(name)) + + for name in ['EB_bzip2', 'EB_DL_underscore_POLY_underscore_Classic', 'EB_GCC', 'EB_WRF_minus_Fire']: + self.assertFalse(ft.is_generic_easyblock(name)) + + def test_get_easyblock_class_name(self): + """Test for get_easyblock_class_name function.""" + + topdir = os.path.dirname(os.path.abspath(__file__)) + test_ebs = os.path.join(topdir, 'sandbox', 'easybuild', 'easyblocks') + + configuremake = os.path.join(test_ebs, 'generic', 'configuremake.py') + self.assertEqual(ft.get_easyblock_class_name(configuremake), 'ConfigureMake') + + gcc_eb = os.path.join(test_ebs, 'g', 'gcc.py') + self.assertEqual(ft.get_easyblock_class_name(gcc_eb), 'EB_GCC') + + toy_eb = os.path.join(test_ebs, 't', 'toy.py') + self.assertEqual(ft.get_easyblock_class_name(toy_eb), 'EB_toy') + + def test_copy_easyblocks(self): + """Test for copy_easyblocks function.""" + + topdir = os.path.dirname(os.path.abspath(__file__)) + test_ebs = os.path.join(topdir, 'sandbox', 'easybuild', 'easyblocks') + + # easybuild/easyblocks subdirectory must exist in target directory + error_pattern = "Could not find easybuild/easyblocks subdir in .*" + self.assertErrorRegex(EasyBuildError, error_pattern, ft.copy_easyblocks, [], self.test_prefix) + + easyblocks_dir = os.path.join(self.test_prefix, 'easybuild', 
'easyblocks') + + # passing empty list works fine + ft.mkdir(easyblocks_dir, parents=True) + res = ft.copy_easyblocks([], self.test_prefix) + self.assertEqual(os.listdir(easyblocks_dir), []) + self.assertEqual(res, {'eb_names': [], 'new': [], 'paths_in_repo': []}) + + # check with different types of easyblocks + configuremake = os.path.join(test_ebs, 'generic', 'configuremake.py') + gcc_eb = os.path.join(test_ebs, 'g', 'gcc.py') + toy_eb = os.path.join(test_ebs, 't', 'toy.py') + test_ebs = [gcc_eb, configuremake, toy_eb] + + # copy them straight into tmpdir first, to check whether correct subdir is derived correctly + ft.copy_files(test_ebs, self.test_prefix) + + # touch empty toy.py easyblock, to check whether 'new' aspect is determined correctly + ft.write_file(os.path.join(easyblocks_dir, 't', 'toy.py'), '') + + # check whether easyblocks were copied as expected, and returned dict is correct + test_ebs = [os.path.join(self.test_prefix, os.path.basename(e)) for e in test_ebs] + res = ft.copy_easyblocks(test_ebs, self.test_prefix) + + self.assertEqual(sorted(res.keys()), ['eb_names', 'new', 'paths_in_repo']) + self.assertEqual(res['eb_names'], ['gcc', 'configuremake', 'toy']) + self.assertEqual(res['new'], [True, True, False]) # toy.py is not new + + self.assertEqual(sorted(os.listdir(easyblocks_dir)), ['g', 'generic', 't']) + + g_dir = os.path.join(easyblocks_dir, 'g') + self.assertEqual(sorted(os.listdir(g_dir)), ['gcc.py']) + copied_gcc_eb = os.path.join(g_dir, 'gcc.py') + self.assertEqual(ft.read_file(copied_gcc_eb), ft.read_file(gcc_eb)) + self.assertTrue(os.path.samefile(res['paths_in_repo'][0], copied_gcc_eb)) + + gen_dir = os.path.join(easyblocks_dir, 'generic') + self.assertEqual(sorted(os.listdir(gen_dir)), ['configuremake.py']) + copied_configuremake = os.path.join(gen_dir, 'configuremake.py') + self.assertEqual(ft.read_file(copied_configuremake), ft.read_file(configuremake)) + self.assertTrue(os.path.samefile(res['paths_in_repo'][1], 
copied_configuremake)) + + t_dir = os.path.join(easyblocks_dir, 't') + self.assertEqual(sorted(os.listdir(t_dir)), ['toy.py']) + copied_toy_eb = os.path.join(t_dir, 'toy.py') + self.assertEqual(ft.read_file(copied_toy_eb), ft.read_file(toy_eb)) + self.assertTrue(os.path.samefile(res['paths_in_repo'][2], copied_toy_eb)) + + def test_copy_framework_files(self): + """Test for copy_framework_files function.""" + + target_dir = os.path.join(self.test_prefix, 'target') + ft.mkdir(target_dir) + + res = ft.copy_framework_files([], target_dir) + + self.assertEqual(os.listdir(target_dir), []) + self.assertEqual(res, {'paths_in_repo': [], 'new': []}) + + foo_py = os.path.join(self.test_prefix, 'foo.py') + ft.write_file(foo_py, '') + + error_pattern = "Specified path '.*/foo.py' does not include a 'easybuild-framework' directory!" + self.assertErrorRegex(EasyBuildError, error_pattern, ft.copy_framework_files, [foo_py], self.test_prefix) + + # create empty test/framework/modules.py, to check whether 'new' is set correctly in result + ft.write_file(os.path.join(target_dir, 'test', 'framework', 'modules.py'), '') + + topdir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + test_files = [ + os.path.join('easybuild', 'tools', 'filetools.py'), + os.path.join('test', 'framework', 'modules.py'), + os.path.join('test', 'framework', 'sandbox', 'sources', 'toy', 'toy-0.0.tar.gz'), + ] + expected_entries = ['easybuild', 'test'] + # test/framework/modules.py is not new + expected_new = [True, False, True] + + # we include setup.py conditionally because it may not be there, + # for example when running the tests on an actual easybuild-framework instalation, + # as opposed to when running from a repository checkout... 
+ # setup.py is an important test case, since it has no parent directory + # (it's straight in the easybuild-framework directory) + setup_py = 'setup.py' + if os.path.exists(os.path.join(topdir, setup_py)): + test_files.append(os.path.join(setup_py)) + expected_entries.append(setup_py) + expected_new.append(True) + + # files being copied are expected to be in a directory named 'easybuild-framework', + # so we need to make sure that's the case here as well (may not be in workspace dir on Travis from example) + framework_dir = os.path.join(self.test_prefix, 'easybuild-framework') + for test_file in test_files: + ft.copy_file(os.path.join(topdir, test_file), os.path.join(framework_dir, test_file)) + + test_paths = [os.path.join(framework_dir, f) for f in test_files] + + res = ft.copy_framework_files(test_paths, target_dir) + + self.assertEqual(sorted(os.listdir(target_dir)), sorted(expected_entries)) + + self.assertEqual(sorted(res.keys()), ['new', 'paths_in_repo']) + + for idx, test_file in enumerate(test_files): + orig_path = os.path.join(topdir, test_file) + copied_path = os.path.join(target_dir, test_file) + + self.assertTrue(os.path.exists(copied_path)) + self.assertEqual(ft.read_file(orig_path, mode='rb'), ft.read_file(copied_path, mode='rb')) + + self.assertTrue(os.path.samefile(copied_path, res['paths_in_repo'][idx])) + + self.assertEqual(res['new'], expected_new) def suite(): diff --git a/test/framework/github.py b/test/framework/github.py index 4b4c68c31c..b0b20bfea9 100644 --- a/test/framework/github.py +++ b/test/framework/github.py @@ -37,11 +37,13 @@ from unittest import TextTestRunner from easybuild.base.rest import RestClient +from easybuild.framework.easyconfig.tools import categorize_files_by_type from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.config import module_classes +from easybuild.tools.config import build_option, module_classes from easybuild.tools.configobj import ConfigObj from easybuild.tools.filetools import 
read_file, write_file from easybuild.tools.github import VALID_CLOSE_PR_REASONS +from easybuild.tools.testing import post_easyconfigs_pr_test_report, session_state from easybuild.tools.py2vs3 import HTTPError, URLError, ascii_letters import easybuild.tools.github as gh @@ -54,8 +56,8 @@ # test account, for which a token may be available GITHUB_TEST_ACCOUNT = 'easybuild_test' -# the user & repo to use in this test (https://github.com/hpcugent/testrepository) -GITHUB_USER = "hpcugent" +# the user & repo to use in this test (https://github.com/easybuilders/testrepository) +GITHUB_USER = "easybuilders" GITHUB_REPO = "testrepository" # branch to test GITHUB_BRANCH = 'master' @@ -220,10 +222,10 @@ def test_close_pr(self): self.mock_stdout(False) patterns = [ - "hpcugent/testrepository PR #2 was submitted by migueldiascosta", + "easybuilders/testrepository PR #2 was submitted by migueldiascosta", "[DRY RUN] Adding comment to testrepository issue #2: '" + "@migueldiascosta, this PR is being closed for the following reason(s): just a test", - "[DRY RUN] Closed hpcugent/testrepository PR #2", + "[DRY RUN] Closed easybuilders/testrepository PR #2", ] for pattern in patterns: self.assertTrue(pattern in stdout, "Pattern '%s' found in: %s" % (pattern, stdout)) @@ -236,15 +238,42 @@ def test_close_pr(self): self.mock_stdout(False) patterns = [ - "hpcugent/testrepository PR #2 was submitted by migueldiascosta", + "easybuilders/testrepository PR #2 was submitted by migueldiascosta", "[DRY RUN] Adding comment to testrepository issue #2: '" + "@migueldiascosta, this PR is being closed for the following reason(s): %s" % retest_msg, - "[DRY RUN] Closed hpcugent/testrepository PR #2", - "[DRY RUN] Reopened hpcugent/testrepository PR #2", + "[DRY RUN] Closed easybuilders/testrepository PR #2", + "[DRY RUN] Reopened easybuilders/testrepository PR #2", ] for pattern in patterns: self.assertTrue(pattern in stdout, "Pattern '%s' found in: %s" % (pattern, stdout)) + def 
test_fetch_easyblocks_from_pr(self): + """Test fetch_easyblocks_from_pr function.""" + if self.skip_github_tests: + print("Skipping test_fetch_easyblocks_from_pr, no GitHub token available?") + return + + init_config(build_options={ + 'pr_target_account': gh.GITHUB_EB_MAIN, + }) + + # PR with new easyblock plus non-easyblock file + all_ebs_pr1964 = ['lammps.py'] + + # PR with changed easyblock + all_ebs_pr1967 = ['siesta.py'] + + # PR with more than one easyblock + all_ebs_pr1949 = ['configuremake.py', 'rpackage.py'] + + for pr, all_ebs in [(1964, all_ebs_pr1964), (1967, all_ebs_pr1967), (1949, all_ebs_pr1949)]: + try: + tmpdir = os.path.join(self.test_prefix, 'pr%s' % pr) + eb_files = gh.fetch_easyblocks_from_pr(pr, path=tmpdir, github_user=GITHUB_TEST_ACCOUNT) + self.assertEqual(sorted(all_ebs), sorted([os.path.basename(f) for f in eb_files])) + except URLError as err: + print("Ignoring URLError '%s' in test_fetch_easyblocks_from_pr" % err) + def test_fetch_easyconfigs_from_pr(self): """Test fetch_easyconfigs_from_pr function.""" if self.skip_github_tests: @@ -597,7 +626,7 @@ def test_restclient(self): client = RestClient('https://api.github.com', username=GITHUB_TEST_ACCOUNT, token=self.github_token) - status, body = client.repos['hpcugent']['testrepository'].contents.a_directory['a_file.txt'].get() + status, body = client.repos['easybuilders']['testrepository'].contents.a_directory['a_file.txt'].get() self.assertEqual(status, 200) # base64.b64encode requires & produces a 'bytes' value in Python 3, # but we need a string value hence the .decode() (also works in Python 2) @@ -666,6 +695,61 @@ def test_det_account_branch_for_pr(self): self.assertEqual(account, 'migueldiascosta') self.assertEqual(branch, 'fix_inject_checksums') + def test_det_pr_target_repo(self): + """Test det_pr_target_repo.""" + + self.assertEqual(build_option('pr_target_repo'), None) + + # no files => return default target repo (None) + 
self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type([])), None) + + # easyconfigs/patches (incl. files to delete) => easyconfigs repo + # this is solely based on filenames, actual files are not opened + test_cases = [ + ['toy.eb'], + ['toy.patch'], + ['toy.eb', 'toy.patch'], + [':toy.eb'], # deleting toy.eb + ['one.eb', 'two.eb'], + ['one.eb', 'two.eb', 'toy.patch', ':todelete.eb'], + ] + for test_case in test_cases: + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type(test_case)), 'easybuild-easyconfigs') + + # if only Python files are involved, result is easyblocks or framework repo; + # all Python files are easyblocks => easyblocks repo, otherwise => framework repo; + # files are opened and inspected here to discriminate between easyblocks & other Python files, so must exist! + testdir = os.path.dirname(os.path.abspath(__file__)) + github_py = os.path.join(testdir, 'github.py') + + configuremake = os.path.join(testdir, 'sandbox', 'easybuild', 'easyblocks', 'generic', 'configuremake.py') + self.assertTrue(os.path.exists(configuremake)) + toy_eb = os.path.join(testdir, 'sandbox', 'easybuild', 'easyblocks', 't', 'toy.py') + self.assertTrue(os.path.exists(toy_eb)) + + self.assertEqual(build_option('pr_target_repo'), None) + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type([github_py])), 'easybuild-framework') + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type([configuremake])), 'easybuild-easyblocks') + py_files = [github_py, configuremake] + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type(py_files)), 'easybuild-framework') + py_files[0] = toy_eb + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type(py_files)), 'easybuild-easyblocks') + py_files.append(github_py) + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type(py_files)), 'easybuild-framework') + + # as soon as an easyconfig file or patch files is involved => result is easybuild-easyconfigs repo + for fn in 
['toy.eb', 'toy.patch']: + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type(py_files + [fn])), 'easybuild-easyconfigs') + + # if --pr-target-repo is specified, we always get this value (no guessing anymore) + init_config(build_options={'pr_target_repo': 'thisisjustatest'}) + + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type([])), 'thisisjustatest') + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type(['toy.eb', 'toy.patch'])), 'thisisjustatest') + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type(py_files)), 'thisisjustatest') + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type([configuremake])), 'thisisjustatest') + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type([toy_eb])), 'thisisjustatest') + def test_push_branch_to_github(self): """Test push_branch_to_github.""" @@ -695,6 +779,39 @@ def test_push_branch_to_github(self): regex = re.compile(pattern) self.assertTrue(regex.match(stdout.strip()), "Pattern '%s' doesn't match: %s" % (regex.pattern, stdout)) + def test_post_easyconfigs_pr_test_report(self): + """Test for post_easyconfigs_pr_test_report function.""" + if self.skip_github_tests: + print("Skipping test_post_easyconfigs_pr_test_report, no GitHub token available?") + return + + init_config(build_options={ + 'dry_run': True, + 'github_user': GITHUB_TEST_ACCOUNT, + }) + + test_report = os.path.join(self.test_prefix, 'test_report.txt') + write_file(test_report, "This is a test report!") + + init_session_state = session_state() + + self.mock_stderr(True) + self.mock_stdout(True) + post_easyconfigs_pr_test_report('1234', test_report, "OK!", init_session_state, True) + stderr, stdout = self.get_stderr(), self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + + self.assertEqual(stderr, '') + + patterns = [ + r"^\[DRY RUN\] Adding comment to easybuild-easyconfigs issue #1234: 'Test report by @easybuild_test", + r"^See https://gist.github.com/DRY_RUN for a full 
test report.'", + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout)) + def suite(): """ returns all the testcases in this module """ diff --git a/test/framework/modules.py b/test/framework/modules.py index ca3d826e7c..93015b4a07 100644 --- a/test/framework/modules.py +++ b/test/framework/modules.py @@ -42,6 +42,7 @@ import easybuild.tools.modules as mod from easybuild.framework.easyblock import EasyBlock +from easybuild.framework.easyconfig.easyconfig import EasyConfig from easybuild.tools.build_log import EasyBuildError from easybuild.tools.filetools import adjust_permissions, copy_file, copy_dir, mkdir from easybuild.tools.filetools import read_file, remove_dir, remove_file, symlink, write_file @@ -1207,6 +1208,32 @@ def test_modulecmd_strip_source(self): modtool.run_module('load', 'test123') self.assertEqual(os.getenv('TEST123'), 'test123') + def test_get_setenv_value_from_modulefile(self): + """Test for ModulesTool.get_setenv_value_from_modulefile method.""" + + topdir = os.path.dirname(os.path.abspath(__file__)) + eb_path = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb') + + test_ec = os.path.join(self.test_prefix, 'test.eb') + write_file(test_ec, read_file(eb_path)) + write_file(test_ec, "\nmodextravars = {'FOO': 'value with spaces'}", append=True) + + toy_eb = EasyBlock(EasyConfig(test_ec)) + toy_eb.make_module_step() + + expected_root = os.path.join(self.test_installpath, 'software', 'toy', '0.0') + ebroot = self.modtool.get_setenv_value_from_modulefile('toy/0.0', 'EBROOTTOY') + self.assertTrue(os.path.samefile(ebroot, expected_root)) + + ebversion = self.modtool.get_setenv_value_from_modulefile('toy/0.0', 'EBVERSIONTOY') + self.assertEqual(ebversion, '0.0') + + foo = self.modtool.get_setenv_value_from_modulefile('toy/0.0', 'FOO') + self.assertEqual(foo, "value with spaces") + + res = 
self.modtool.get_setenv_value_from_modulefile('toy/0.0', 'NO_SUCH_VARIABLE_SET') + self.assertEqual(res, None) + def suite(): """ returns all the testcases in this module """ diff --git a/test/framework/options.py b/test/framework/options.py index ee6d789a14..a755b7d7c4 100644 --- a/test/framework/options.py +++ b/test/framework/options.py @@ -59,6 +59,7 @@ from easybuild.tools.py2vs3 import URLError, reload, sort_looseversions from easybuild.tools.toolchain.utilities import TC_CONST_PREFIX from easybuild.tools.run import run_cmd +from easybuild.tools.systemtools import HAVE_ARCHSPEC from easybuild.tools.version import VERSION from test.framework.utilities import EnhancedTestCase, TestLoaderFiltered, init_config @@ -776,6 +777,47 @@ def test_search(self): args = [opt, pattern, '--robot', test_easyconfigs_dir] self.assertErrorRegex(EasyBuildError, "Invalid search query", self.eb_main, args, raise_error=True) + def test_ignore_index(self): + """ + Test use of --ignore-index. + """ + + test_ecs_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs') + toy_ec = os.path.join(test_ecs_dir, 'test_ecs', 't', 'toy', 'toy-0.0.eb') + copy_file(toy_ec, self.test_prefix) + + toy_ec_list = ['toy-0.0.eb', 'toy-1.2.3.eb', 'toy-4.5.6.eb'] + + # install index that list more files than are actually available, + # so we can check whether it's used + index_txt = '\n'.join(toy_ec_list) + write_file(os.path.join(self.test_prefix, '.eb-path-index'), index_txt) + + args = [ + '--search=toy', + '--robot-paths=%s' % self.test_prefix, + ] + self.mock_stdout(True) + self.eb_main(args, testing=False, raise_error=True) + stdout = self.get_stdout() + self.mock_stdout(False) + + for toy_ec_fn in toy_ec_list: + regex = re.compile(re.escape(os.path.join(self.test_prefix, toy_ec_fn)), re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout)) + + args.append('--ignore-index') + self.mock_stdout(True) + self.eb_main(args, 
testing=False, raise_error=True) + stdout = self.get_stdout() + self.mock_stdout(False) + + regex = re.compile(re.escape(os.path.join(self.test_prefix, 'toy-0.0.eb')), re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout)) + for toy_ec_fn in ['toy-1.2.3.eb', 'toy-4.5.6.eb']: + regex = re.compile(re.escape(os.path.join(self.test_prefix, toy_ec_fn)), re.M) + self.assertFalse(regex.search(stdout), "Pattern '%s' should not be found in: %s" % (regex.pattern, stdout)) + def test_search_archived(self): "Test searching for archived easyconfigs" args = ['--search-filename=^intel'] @@ -836,6 +878,16 @@ def test_show_ec(self): def test_copy_ec(self): """Test --copy-ec.""" + def mocked_main(args): + self.mock_stderr(True) + self.mock_stdout(True) + self.eb_main(args, raise_error=True) + stderr, stdout = self.get_stderr(), self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + self.assertEqual(stderr, '') + return stdout.strip() + topdir = os.path.dirname(os.path.abspath(__file__)) test_easyconfigs_dir = os.path.join(topdir, 'easyconfigs', 'test_ecs') @@ -845,7 +897,8 @@ def test_copy_ec(self): # basic test: copying one easyconfig file to a non-existing absolute path test_ec = os.path.join(self.test_prefix, 'test.eb') args = ['--copy-ec', 'toy-0.0.eb', test_ec] - self.eb_main(args) + stdout = mocked_main(args) + self.assertEqual(stdout, 'toy-0.0.eb copied to %s' % test_ec) self.assertTrue(os.path.exists(test_ec)) self.assertEqual(toy_ec_txt, read_file(test_ec)) @@ -858,7 +911,8 @@ def test_copy_ec(self): self.assertFalse(os.path.exists(target_fn)) args = ['--copy-ec', 'toy-0.0.eb', target_fn] - self.eb_main(args) + stdout = mocked_main(args) + self.assertEqual(stdout, 'toy-0.0.eb copied to test.eb') change_dir(cwd) @@ -869,7 +923,8 @@ def test_copy_ec(self): test_target_dir = os.path.join(self.test_prefix, 'test_target_dir') mkdir(test_target_dir) args = ['--copy-ec', 'toy-0.0.eb', test_target_dir] - 
self.eb_main(args) + stdout = mocked_main(args) + self.assertEqual(stdout, 'toy-0.0.eb copied to %s' % test_target_dir) copied_toy_ec = os.path.join(test_target_dir, 'toy-0.0.eb') self.assertTrue(os.path.exists(copied_toy_ec)) @@ -890,7 +945,8 @@ def check_copied_files(): # copying multiple easyconfig files to a non-existing target directory (which is created automatically) args = ['--copy-ec', 'toy-0.0.eb', 'bzip2-1.0.6-GCC-4.9.2.eb', test_target_dir] - self.eb_main(args) + stdout = mocked_main(args) + self.assertEqual(stdout, '2 file(s) copied to %s' % test_target_dir) check_copied_files() @@ -901,7 +957,8 @@ def check_copied_files(): args[-1] = os.path.basename(test_target_dir) self.assertFalse(os.path.exists(args[-1])) - self.eb_main(args) + stdout = mocked_main(args) + self.assertEqual(stdout, '2 file(s) copied to test_target_dir') check_copied_files() @@ -912,6 +969,24 @@ def check_copied_files(): error_pattern = ".*/test.eb exists but is not a directory" self.assertErrorRegex(EasyBuildError, error_pattern, self.eb_main, args, raise_error=True) + # test use of --copy-ec with only one argument: copy to current working directory + test_working_dir = os.path.join(self.test_prefix, 'test_working_dir') + mkdir(test_working_dir) + change_dir(test_working_dir) + self.assertEqual(len(os.listdir(os.getcwd())), 0) + args = ['--copy-ec', 'toy-0.0.eb'] + stdout = mocked_main(args) + regex = re.compile('toy-0.0.eb copied to .*/%s' % os.path.basename(test_working_dir)) + self.assertTrue(regex.match(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout)) + copied_toy_cwd = os.path.join(test_working_dir, 'toy-0.0.eb') + self.assertTrue(os.path.exists(copied_toy_cwd)) + self.assertEqual(read_file(copied_toy_cwd), toy_ec_txt) + + # --copy-ec without arguments results in a proper error + args = ['--copy-ec'] + error_pattern = "One of more files to copy should be specified!" 
+ self.assertErrorRegex(EasyBuildError, error_pattern, self.eb_main, args, raise_error=True) + def test_dry_run(self): """Test dry run (long format).""" fd, dummylogfn = tempfile.mkstemp(prefix='easybuild-dummy', suffix='.log') @@ -1103,6 +1178,53 @@ def test_try_toolchain_mapping(self): regex = re.compile(anti_pattern, re.M) self.assertFalse(regex.search(outtxt), "Pattern '%s' NOT found in: %s" % (regex.pattern, outtxt)) + def test_try_update_deps(self): + """Test for --try-update-deps.""" + + # first, construct a toy easyconfig that is well suited for testing (multiple deps) + test_ectxt = '\n'.join([ + "easyblock = 'ConfigureMake'", + '', + "name = 'test'", + "version = '1.2.3'", + '' + "homepage = 'https://test.org'", + "description = 'this is just a test'", + '', + "toolchain = {'name': 'GCC', 'version': '4.9.3-2.26'}", + '', + "builddependencies = [('gzip', '1.4')]", + "dependencies = [('hwloc', '1.6.2')]", + ]) + test_ec = os.path.join(self.test_prefix, 'test.eb') + write_file(test_ec, test_ectxt) + + args = [ + test_ec, + '--try-toolchain-version=6.4.0-2.28', + '--try-update-deps', + '-D', + ] + + self.assertErrorRegex(EasyBuildError, "Experimental functionality", self.eb_main, args, raise_error=True) + + args.append('--experimental') + outtxt = self.eb_main(args, raise_error=True, do_build=True) + + patterns = [ + # toolchain got updated + r"^ \* \[x\] .*/test_ecs/g/GCC/GCC-6.4.0-2.28.eb \(module: GCC/6.4.0-2.28\)$", + # no version update for gzip (because there's no gzip easyconfig using GCC/6.4.0-2.28 (sub)toolchain) + r"^ \* \[ \] .*/tweaked_dep_easyconfigs/gzip-1.4-GCC-6.4.0-2.28.eb \(module: gzip/1.4-GCC-6.4.0-2.28\)$", + # hwloc was updated to 1.11.8, thanks to available easyconfig + r"^ \* \[x\] .*/test_ecs/h/hwloc/hwloc-1.11.8-GCC-6.4.0-2.28.eb \(module: hwloc/1.11.8-GCC-6.4.0-2.28\)$", + # also generated easyconfig for test/1.2.3 with expected toolchain + r"^ \* \[ \] .*/tweaked_easyconfigs/test-1.2.3-GCC-6.4.0-2.28.eb \(module: 
test/1.2.3-GCC-6.4.0-2.28\)$", + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(outtxt), "Pattern '%s' should be found in: %s" % (regex.pattern, outtxt)) + def test_dry_run_hierarchical(self): """Test dry run using a hierarchical module naming scheme.""" fd, dummylogfn = tempfile.mkstemp(prefix='easybuild-dummy', suffix='.log') @@ -1729,17 +1851,17 @@ def test_try(self): test_cases = [ ([], 'toy/0.0'), - # combining --try-toolchain with other build options is too complicated, in this case the code defaults back - # to doing a simple regex substitution on the toolchain - (['--try-software=foo,1.2.3', '--try-toolchain=gompi,2018a'], 'foo/1.2.3-gompi-2018a'), + # try-* only uses the subtoolchain with matching necessary features + (['--try-software=foo,1.2.3', '--try-toolchain=gompi,2018a'], 'foo/1.2.3-GCC-6.4.0-2.28'), (['--try-toolchain-name=gompi', '--try-toolchain-version=2018a'], 'toy/0.0-GCC-6.4.0.2.28'), # --try-toolchain is overridden by --toolchain (['--try-toolchain=gompi,2018a', '--toolchain=system,system'], 'toy/0.0'), + # check we interpret SYSTEM correctly as a toolchain + (['--try-toolchain=SYSTEM'], 'toy/0.0'), + (['--toolchain=SYSTEM'], 'toy/0.0'), (['--try-software-name=foo', '--try-software-version=1.2.3'], 'foo/1.2.3'), (['--try-toolchain-name=gompi', '--try-toolchain-version=2018a'], 'toy/0.0-GCC-6.4.0.2.28'), - # combining --try-toolchain with other build options is too complicated, in this case the code defaults back - # to doing a simple regex substitution on the toolchain - (['--try-software-version=1.2.3', '--try-toolchain=gompi,2018a'], 'toy/1.2.3-gompi-2018a'), + (['--try-software-version=1.2.3', '--try-toolchain=gompi,2018a'], 'toy/1.2.3-GCC-6.4.0.2.28'), (['--try-amend=versionsuffix=-test'], 'toy/0.0-test'), # --try-amend is overridden by --amend (['--amend=versionsuffix=', '--try-amend=versionsuffix=-test'], 'toy/0.0'), @@ -1753,9 +1875,8 @@ def test_try(self): # define extra list-typed 
parameter (['--try-amend=versionsuffix=-test5', '--try-amend=exts_list=1,2,3'], 'toy/0.0-test5'), # only --try causes other build specs to be included too - # --try-toolchain* has a different branch to all other try options, combining defaults back to regex - (['--try-software=foo,1.2.3', '--toolchain=gompi,2018a'], 'foo/1.2.3-gompi-2018a'), - (['--software=foo,1.2.3', '--try-toolchain=gompi,2018a'], 'foo/1.2.3-gompi-2018a'), + (['--try-software=foo,1.2.3', '--toolchain=gompi,2018a'], 'foo/1.2.3-GCC-6.4.0-2.28'), + (['--software=foo,1.2.3', '--try-toolchain=gompi,2018a'], 'foo/1.2.3-GCC-6.4.0-2.28'), (['--software=foo,1.2.3', '--try-amend=versionsuffix=-test'], 'foo/1.2.3-test'), ] @@ -1822,7 +1943,7 @@ def test_recursive_try(self): outtxt = self.eb_main(args + extra_args, verbose=True, raise_error=True) # toolchain GCC/4.7.2 (subtoolchain of gompi/2018a) should be listed (and present) - tc_regex = re.compile("^ \* \[x\] .*/GCC-6.4.0-2.28.eb \(module: .*GCC/6.4.0-2.28\)$", re.M) + tc_regex = re.compile(r"^ \* \[x\] .*/GCC-6.4.0-2.28.eb \(module: .*GCC/6.4.0-2.28\)$", re.M) self.assertTrue(tc_regex.search(outtxt), "Pattern %s found in %s" % (tc_regex.pattern, outtxt)) # both toy and gzip dependency should be listed with new toolchains @@ -1834,24 +1955,45 @@ def test_recursive_try(self): mod = ec_name.replace('-', '/') else: mod = '%s-GCC-6.4.0-2.28' % ec_name.replace('-', '/') - mod_regex = re.compile("^ \* \[ \] \S+/eb-\S+/%s \(module: .*%s\)$" % (ec, mod), re.M) + mod_regex = re.compile(r"^ \* \[ \] \S+/eb-\S+/%s \(module: .*%s\)$" % (ec, mod), re.M) self.assertTrue(mod_regex.search(outtxt), "Pattern %s found in %s" % (mod_regex.pattern, outtxt)) - # clear fictious dependency + # recursive try also when --(try-)software(-X) is involved + for extra_args in [[], + ['--module-naming-scheme=HierarchicalMNS']]: + outtxt = self.eb_main(args + extra_args + ['--try-software-version=1.2.3'], verbose=True, raise_error=True) + + # toolchain GCC/6.4.0-2.28 (subtoolchain of 
gompi/2018a) should be listed (and present) + tc_regex = re.compile(r"^ \* \[x\] .*/GCC-6.4.0-2.28.eb \(module: .*GCC/6.4.0-2.28\)$", re.M) + self.assertTrue(tc_regex.search(outtxt), "Pattern %s found in %s" % (tc_regex.pattern, outtxt)) + + # both toy and gzip dependency should be listed with new toolchains + # in this case we map original toolchain `dummy` to the compiler-only GCC subtoolchain of gompi/2018a + # since this subtoolchain already has sufficient capabilities (we do not map higher than necessary) + for ec_name in ['gzip-1.4', 'toy-1.2.3']: + ec = '%s-GCC-6.4.0-2.28.eb' % ec_name + mod = ec_name.replace('-', '/') + if not extra_args: + mod += '-GCC-6.4.0-2.28' + mod_regex = re.compile(r"^ \* \[ \] \S+/eb-\S+/%s \(module: .*%s\)$" % (ec, mod), re.M) + self.assertTrue(mod_regex.search(outtxt), "Pattern %s found in %s" % (mod_regex.pattern, outtxt)) + + # clear fictitious dependency f = open(tweaked_toy_ec, 'a') f.write("dependencies = []\n") f.close() - # no recursive try if --(try-)software(-X) is involved + # no recursive try if --disable-map-toolchains is involved for extra_args in [['--try-software-version=1.2.3'], ['--software-version=1.2.3']]: - outtxt = self.eb_main(args + extra_args, raise_error=True) + outtxt = self.eb_main(args + ['--disable-map-toolchains'] + extra_args, raise_error=True) for mod in ['toy/1.2.3-gompi-2018a', 'gompi/2018a', 'GCC/6.4.0-2.28']: - mod_regex = re.compile("\(module: %s\)$" % mod, re.M) + mod_regex = re.compile(r"\(module: %s\)$" % mod, re.M) self.assertTrue(mod_regex.search(outtxt), "Pattern %s found in %s" % (mod_regex.pattern, outtxt)) for mod in ['gompi/1.2.3', 'GCC/1.2.3']: - mod_regex = re.compile("\(module: %s\)$" % mod, re.M) + mod_regex = re.compile(r"\(module: %s\)$" % mod, re.M) self.assertFalse(mod_regex.search(outtxt), "Pattern %s found in %s" % (mod_regex.pattern, outtxt)) + def test_cleanup_builddir(self): """Test cleaning up of build dir and --disable-cleanup-builddir.""" toy_ec = 
os.path.join(os.path.dirname(__file__), 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb') @@ -2434,7 +2576,8 @@ def test_xxx_include_easyblocks(self): self.eb_main(args, logfile=dummylogfn, raise_error=True) logtxt = read_file(self.logfile) - path_pattern = os.path.join(self.test_prefix, '.*', 'included-easyblocks', 'easybuild', 'easyblocks', 'foo.py') + path_pattern = os.path.join(self.test_prefix, '.*', 'included-easyblocks-.*', 'easybuild', 'easyblocks', + 'foo.py') foo_regex = re.compile(r"^\|-- EB_foo \(easybuild.easyblocks.foo @ %s\)" % path_pattern, re.M) self.assertTrue(foo_regex.search(logtxt), "Pattern '%s' found in: %s" % (foo_regex.pattern, logtxt)) @@ -2477,7 +2620,7 @@ def test_xxx_include_generic_easyblocks(self): self.eb_main(args, logfile=dummylogfn, raise_error=True) logtxt = read_file(self.logfile) - path_pattern = os.path.join(self.test_prefix, '.*', 'included-easyblocks', 'easybuild', 'easyblocks', + path_pattern = os.path.join(self.test_prefix, '.*', 'included-easyblocks-.*', 'easybuild', 'easyblocks', 'generic', 'foobar.py') foo_regex = re.compile(r"^\|-- FooBar \(easybuild.easyblocks.generic.foobar @ %s\)" % path_pattern, re.M) self.assertTrue(foo_regex.search(logtxt), "Pattern '%s' found in: %s" % (foo_regex.pattern, logtxt)) @@ -2515,7 +2658,7 @@ def test_xxx_include_generic_easyblocks(self): logtxt = read_file(self.logfile) mod_pattern = 'easybuild.easyblocks.generic.generictest' - path_pattern = os.path.join(self.test_prefix, '.*', 'included-easyblocks', 'easybuild', 'easyblocks', + path_pattern = os.path.join(self.test_prefix, '.*', 'included-easyblocks-.*', 'easybuild', 'easyblocks', 'generic', 'generictest.py') foo_regex = re.compile(r"^\|-- GenericTest \(%s @ %s\)" % (mod_pattern, path_pattern), re.M) self.assertTrue(foo_regex.search(logtxt), "Pattern '%s' found in: %s" % (foo_regex.pattern, logtxt)) @@ -2526,6 +2669,113 @@ def test_xxx_include_generic_easyblocks(self): # 'undo' import of foo easyblock del 
sys.modules['easybuild.easyblocks.generic.generictest'] + # must be run after test for --list-easyblocks, hence the '_xxx_' + # cleaning up the imported easyblocks is quite difficult... + def test_xxx_include_easyblocks_from_pr(self): + """Test --include-easyblocks-from-pr.""" + if self.github_token is None: + print("Skipping test_preview_pr, no GitHub token available?") + return + + orig_local_sys_path = sys.path[:] + fd, dummylogfn = tempfile.mkstemp(prefix='easybuild-dummy', suffix='.log') + os.close(fd) + + # clear log + write_file(self.logfile, '') + + # include extra test easyblock + foo_txt = '\n'.join([ + 'from easybuild.framework.easyblock import EasyBlock', + 'class EB_foo(EasyBlock):', + ' pass', + '' + ]) + write_file(os.path.join(self.test_prefix, 'foo.py'), foo_txt) + + args = [ + '--include-easyblocks=%s/*.py' % self.test_prefix, # this shouldn't interfere + '--include-easyblocks-from-pr=1915', # a PR for CMakeMake easyblock + '--list-easyblocks=detailed', + '--unittest-file=%s' % self.logfile, + '--github-user=%s' % GITHUB_TEST_ACCOUNT, + ] + self.eb_main(args, logfile=dummylogfn, raise_error=True) + logtxt = read_file(self.logfile) + + # easyblock included from pr is found + path_pattern = os.path.join(self.test_prefix, '.*', 'included-easyblocks-.*', 'easybuild', 'easyblocks') + cmm_pattern = os.path.join(path_pattern, 'generic', 'cmakemake.py') + cmm_regex = re.compile(r"\|-- CMakeMake \(easybuild.easyblocks.generic.cmakemake @ %s\)" % cmm_pattern, re.M) + self.assertTrue(cmm_regex.search(logtxt), "Pattern '%s' found in: %s" % (cmm_regex.pattern, logtxt)) + + # easyblock is found via get_easyblock_class + klass = get_easyblock_class('CMakeMake') + self.assertTrue(issubclass(klass, EasyBlock), "%s is an EasyBlock derivative class" % klass) + + # 'undo' import of easyblocks + del sys.modules['easybuild.easyblocks.foo'] + del sys.modules['easybuild.easyblocks.generic.cmakemake'] + os.remove(os.path.join(self.test_prefix, 'foo.py')) + sys.path = 
orig_local_sys_path + import easybuild.easyblocks + reload(easybuild.easyblocks) + import easybuild.easyblocks.generic + reload(easybuild.easyblocks.generic) + + # include test cmakemake easyblock + cmm_txt = '\n'.join([ + 'from easybuild.framework.easyblock import EasyBlock', + 'class CMakeMake(EasyBlock):', + ' pass', + '' + ]) + write_file(os.path.join(self.test_prefix, 'cmakemake.py'), cmm_txt) + + # including the same easyblock twice should fail + args = [ + '--include-easyblocks=%s/cmakemake.py' % self.test_prefix, + '--include-easyblocks-from-pr=1915', + '--list-easyblocks=detailed', + '--unittest-file=%s' % self.logfile, + '--github-user=%s' % GITHUB_TEST_ACCOUNT, + ] + self.assertErrorRegex(EasyBuildError, + "Multiple inclusion of cmakemake.py, check your --include-easyblocks options", + self.eb_main, args, raise_error=True) + + os.remove(os.path.join(self.test_prefix, 'cmakemake.py')) + + # clear log + write_file(self.logfile, '') + + args = [ + '--from-pr=9979', # PR for CMake easyconfig + '--include-easyblocks-from-pr=1936', # PR for EB_CMake easyblock + '--unittest-file=%s' % self.logfile, + '--github-user=%s' % GITHUB_TEST_ACCOUNT, + '--extended-dry-run', + ] + self.eb_main(args, logfile=dummylogfn, raise_error=True) + logtxt = read_file(self.logfile) + + # easyconfig from pr is found + ec_pattern = os.path.join(self.test_prefix, '.*', 'files_pr9979', 'c', 'CMake', + 'CMake-3.16.4-GCCcore-9.2.0.eb') + ec_regex = re.compile(r"Parsing easyconfig file %s" % ec_pattern, re.M) + self.assertTrue(ec_regex.search(logtxt), "Pattern '%s' found in: %s" % (ec_regex.pattern, logtxt)) + + # easyblock included from pr is found + eb_regex = re.compile(r"Successfully obtained EB_CMake class instance from easybuild.easyblocks.cmake", re.M) + self.assertTrue(eb_regex.search(logtxt), "Pattern '%s' found in: %s" % (eb_regex.pattern, logtxt)) + + # easyblock is found via get_easyblock_class + klass = get_easyblock_class('EB_CMake') + self.assertTrue(issubclass(klass, 
EasyBlock), "%s is an EasyBlock derivative class" % klass) + + # 'undo' import of easyblocks + del sys.modules['easybuild.easyblocks.cmake'] + def mk_eb_test_cmd(self, args): """Construct test command for 'eb' with given options.""" @@ -2973,6 +3223,8 @@ def test_new_branch_github(self): return topdir = os.path.dirname(os.path.abspath(__file__)) + + # test easyconfigs test_ecs = os.path.join(topdir, 'easyconfigs', 'test_ecs') toy_ec = os.path.join(test_ecs, 't', 'toy', 'toy-0.0.eb') @@ -2987,11 +3239,60 @@ def test_new_branch_github(self): remote = 'git@github.com:%s/easybuild-easyconfigs.git' % GITHUB_TEST_ACCOUNT regexs = [ r"^== fetching branch 'develop' from https://github.com/easybuilders/easybuild-easyconfigs.git\.\.\.", - r"^== copying easyconfigs to .*/easybuild-easyconfigs\.\.\.", + r"^== copying files to .*/easybuild-easyconfigs\.\.\.", + r"^== pushing branch '.*' to remote '.*' \(%s\) \[DRY RUN\]" % remote, + ] + self._assert_regexs(regexs, txt) + + # test easyblocks + test_ebs = os.path.join(topdir, 'sandbox', 'easybuild', 'easyblocks') + toy_eb = os.path.join(test_ebs, 't', 'toy.py') + + args = [ + '--new-branch-github', + '--github-user=%s' % GITHUB_TEST_ACCOUNT, + toy_eb, + '--pr-title="add easyblock for toy"', + '-D', + ] + txt, _ = self._run_mock_eb(args, do_build=True, raise_error=True, testing=False) + + remote = 'git@github.com:%s/easybuild-easyblocks.git' % GITHUB_TEST_ACCOUNT + regexs = [ + r"^== fetching branch 'develop' from https://github.com/easybuilders/easybuild-easyblocks.git\.\.\.", + r"^== copying files to .*/easybuild-easyblocks\.\.\.", r"^== pushing branch '.*' to remote '.*' \(%s\) \[DRY RUN\]" % remote, ] self._assert_regexs(regexs, txt) + # test framework with tweaked copy of test_module_naming_scheme.py + test_mns_py = os.path.join(topdir, 'sandbox', 'easybuild', 'tools', 'module_naming_scheme', + 'test_module_naming_scheme.py') + target_dir = os.path.join(self.test_prefix, 'easybuild-framework', 'test', 'framework', 'sandbox', 
+ 'easybuild', 'tools', 'module_naming_scheme') + mkdir(target_dir, parents=True) + copy_file(test_mns_py, target_dir) + test_mns_py = os.path.join(target_dir, os.path.basename(test_mns_py)) + write_file(test_mns_py, '\n\n', append=True) + + args = [ + '--new-branch-github', + '--github-user=%s' % GITHUB_TEST_ACCOUNT, + test_mns_py, + '--pr-commit-msg="a test"', + '-D', + ] + txt, _ = self._run_mock_eb(args, do_build=True, raise_error=True, testing=False) + + remote = 'git@github.com:%s/easybuild-framework.git' % GITHUB_TEST_ACCOUNT + regexs = [ + r"^== fetching branch 'develop' from https://github.com/easybuilders/easybuild-framework.git\.\.\.", + r"^== copying files to .*/easybuild-framework\.\.\.", + r"^== pushing branch '.*' to remote '.*' \(%s\) \[DRY RUN\]" % remote, + ] + self._assert_regexs(regexs, txt) + + def test_new_pr_from_branch(self): """Test --new-pr-from-branch.""" if self.github_token is None: @@ -3052,7 +3353,7 @@ def test_update_branch_github(self): full_repo = 'boegel/easybuild-easyconfigs' regexs = [ r"^== fetching branch 'develop' from https://github.com/%s.git\.\.\." 
% full_repo, - r"^== copying easyconfigs to .*/git-working-dir.*/easybuild-easyconfigs...", + r"^== copying files to .*/git-working-dir.*/easybuild-easyconfigs...", r"^== pushing branch 'develop' to remote '.*' \(git@github.com:%s.git\) \[DRY RUN\]" % full_repo, r"^Overview of changes:\n.*/easyconfigs/t/toy/toy-0.0.eb \| 32", r"== pushed updated branch 'develop' to boegel/easybuild-easyconfigs \[DRY RUN\]", @@ -3459,6 +3760,7 @@ def test_merge_pr(self): '4781', # PR for easyconfig for EasyBuild-3.3.0.eb '-D', '--github-user=%s' % GITHUB_TEST_ACCOUNT, + '--pr-target-branch=some_branch', ] # merged PR for EasyBuild-3.3.0.eb, is missing approved review @@ -3466,12 +3768,12 @@ def test_merge_pr(self): expected_stdout = '\n'.join([ "Checking eligibility of easybuilders/easybuild-easyconfigs PR #4781 for merging...", - "* targets develop branch: OK", "* test suite passes: OK", "* last test report is successful: OK", "* milestone is set: OK (3.3.1)", ]) expected_stderr = '\n'.join([ + "* targets some_branch branch: FAILED; found 'develop' => not eligible for merging!", "* approved review: MISSING => not eligible for merging!", '', "WARNING: Review indicates this PR should not be merged (use -f/--force to do so anyway)", @@ -3479,7 +3781,8 @@ def test_merge_pr(self): self.assertEqual(stderr.strip(), expected_stderr) self.assertTrue(stdout.strip().endswith(expected_stdout), "'%s' ends with '%s'" % (stdout, expected_stdout)) - # full eligible merged PR + # full eligible merged PR, default target branch + del args[-1] args[1] = '4832' stdout, stderr = self._run_mock_eb(args, do_build=True, raise_error=True, testing=False) @@ -4144,6 +4447,29 @@ def test_check_contrib_non_style(self): for pattern in patterns: self.assertTrue(re.search(pattern, stdout, re.M), "Pattern '%s' found in: %s" % (pattern, stdout)) + # --check-contrib passes if None values are used as checksum, but produces warning + toy = os.path.join(self.test_prefix, 'toy.eb') + 
copy_file(os.path.join(os.path.dirname(__file__), 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb'), toy) + toytxt = read_file(toy) + toytxt = toytxt + '\n'.join([ + 'checksums = [', + " None, # toy-0.0.tar.gz", + " # toy-0.0_fix-silly-typo-in-printf-statement.patch", + " '45b5e3f9f495366830e1869bb2b8f4e7c28022739ce48d9f9ebb159b439823c5',", + " '4196b56771140d8e2468fb77f0240bc48ddbf5dabafe0713d612df7fafb1e458', # toy-extra.txt", + ']\n', + ]) + write_file(toy, toytxt) + + args = ['--check-contrib', toy] + self.mock_stdout(True) + self.mock_stderr(True) + self.eb_main(args, raise_error=True) + stderr = self.get_stderr().strip() + self.mock_stdout(False) + self.mock_stderr(False) + self.assertEqual(stderr, "WARNING: Found 1 None checksum value(s), please make sure this is intended!") + def test_allow_use_as_root(self): """Test --allow-use-as-root-and-accept-consequences""" @@ -4592,6 +4918,12 @@ def test_show_system_info(self): "^ -> Python binary: .*/[pP]ython[0-9]?", "^ -> Python version: [0-9.]+", ] + + if HAVE_ARCHSPEC: + patterns.append(r"^ -> arch name: \w+$") + else: + patterns.append(r"^ -> arch name: UNKNOWN \(archspec is not installed\?\)$") + for pattern in patterns: regex = re.compile(pattern, re.M) self.assertTrue(regex.search(txt), "Pattern '%s' found in: %s" % (regex.pattern, txt)) @@ -4709,6 +5041,51 @@ def test_cuda_compute_capabilities(self): regex = re.compile(r"^cuda-compute-capabilities\s*\(C\)\s*=\s*3\.5, 6\.2, 7\.0$", re.M) self.assertTrue(regex.search(txt), "Pattern '%s' not found in: %s" % (regex.pattern, txt)) + def test_create_index(self): + """Test --create-index option.""" + test_ecs = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs', 'test_ecs') + remove_dir(self.test_prefix) + copy_dir(test_ecs, self.test_prefix) + + args = ['--create-index', self.test_prefix] + stdout, stderr = self._run_mock_eb(args, raise_error=True) + + self.assertEqual(stderr, '') + + patterns = [ + r"^Creating index for %s\.\.\.$", + 
r"^Index created at %s/\.eb-path-index \([0-9]+ files\)$", + ] + for pattern in patterns: + regex = re.compile(pattern % self.test_prefix, re.M) + self.assertTrue(regex.search(stdout), "Pattern %s matches in: %s" % (regex.pattern, stdout)) + + # check contents of index + index_fp = os.path.join(self.test_prefix, '.eb-path-index') + index_txt = read_file(index_fp) + + datestamp_pattern = r"[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]+" + patterns = [ + r"^# created at: " + datestamp_pattern + '$', + r"^# valid until: " + datestamp_pattern + '$', + r"^g/GCC/GCC-7.3.0-2.30.eb", + r"^t/toy/toy-0\.0\.eb", + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(index_txt), "Pattern '%s' found in: %s" % (regex.pattern, index_txt)) + + # existing index is not overwritten without --force + error_pattern = "File exists, not overwriting it without --force: .*/.eb-path-index" + self.assertErrorRegex(EasyBuildError, error_pattern, self._run_mock_eb, args, raise_error=True) + + # also test creating index that's infinitely valid + args.extend(['--index-max-age=0', '--force']) + self._run_mock_eb(args, raise_error=True) + index_txt = read_file(index_fp) + regex = re.compile(r"^# valid until: 9999-12-31 23:59:59", re.M) + self.assertTrue(regex.search(index_txt), "Pattern '%s' found in: %s" % (regex.pattern, index_txt)) + def suite(): """ returns all the testcases in this module """ diff --git a/test/framework/repository.py b/test/framework/repository.py index 41a985deb2..b2326c7426 100644 --- a/test/framework/repository.py +++ b/test/framework/repository.py @@ -79,7 +79,7 @@ def test_gitrepo(self): print("(skipping GitRepository test)") return - test_repo_url = 'https://github.com/hpcugent/testrepository' + test_repo_url = 'https://github.com/easybuilders/testrepository' # URL repo = GitRepository(test_repo_url) @@ -122,7 +122,7 @@ def test_svnrepo(self): return # GitHub also supports SVN - test_repo_url = 
'https://github.com/hpcugent/testrepository' + test_repo_url = 'https://github.com/easybuilders/testrepository' repo = SvnRepository(test_repo_url) repo.init() diff --git a/test/framework/robot.py b/test/framework/robot.py index fc94a84850..578985ef00 100644 --- a/test/framework/robot.py +++ b/test/framework/robot.py @@ -54,7 +54,7 @@ from easybuild.tools.github import fetch_github_token from easybuild.tools.module_naming_scheme.utilities import det_full_ec_version from easybuild.tools.modules import invalidate_module_caches_for, reset_module_caches -from easybuild.tools.robot import check_conflicts, det_robot_path, resolve_dependencies +from easybuild.tools.robot import check_conflicts, det_robot_path, resolve_dependencies, search_easyconfigs from test.framework.utilities import find_full_path @@ -88,6 +88,10 @@ def show(self, modname): txt = 'Module %s not found' % modname return txt + def get_setenv_value_from_modulefile(self, mod_name, var_name): + """Dummy implementation of get_setenv_value_from_modulefile, always returns None.""" + return None + def mock_module(mod_paths=None): """Get mock module instance.""" @@ -424,14 +428,14 @@ def test_resolve_dependencies_minimal(self): # to test resolving of dependencies with minimal toolchain # for each of these, we know test easyconfigs are available (which are required here) "dependencies = [", - " ('OpenMPI', '2.1.2'),", # available with GCC/6.4.0-2.28 + # the use of %(version_minor)s here is mainly to check if templates are being handled correctly + # (it doesn't make much sense, but it serves the purpose) + " ('OpenMPI', '%(version_minor)s.1.2'),", # available with GCC/6.4.0-2.28 " ('OpenBLAS', '0.2.20'),", # available with GCC/6.4.0-2.28 " ('ScaLAPACK', '2.0.2', '-OpenBLAS-0.2.20'),", # available with gompi/2018a " ('SQLite', '3.8.10.2'),", "]", # toolchain as list line, for easy modification later; - # the use of %(version_minor)s here is mainly to check if templates are being handled correctly - # (it doesn't 
make much sense, but it serves the purpose) "toolchain = {'name': 'foss', 'version': '%(version_minor)s018a'}", ] write_file(barec, '\n'.join(barec_lines)) @@ -1473,6 +1477,75 @@ def test_robot_archived_easyconfigs(self): expected = os.path.join(test_ecs, '__archive__', 'i', 'intel', 'intel-2012a.eb') self.assertTrue(os.path.samefile(res[0]['spec'], expected)) + def test_search_easyconfigs(self): + """Test search_easyconfigs function.""" + test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs') + init_config(build_options={ + 'robot_path': [test_ecs], + 'search_paths': [self.test_prefix], + }) + + # copy some files to search_paths location + copy_file(os.path.join(test_ecs, 'b', 'binutils', 'binutils-2.25-GCCcore-4.9.3.eb'), self.test_prefix) + copy_file(os.path.join(test_ecs, 'h', 'hwloc', 'hwloc-1.11.8-GCC-4.6.4.eb'), self.test_prefix) + + paths = search_easyconfigs('binutils-.*-GCCcore-4.9.3', consider_extra_paths=False, print_result=False) + ref_paths = [os.path.join(test_ecs, 'b', 'binutils', x) for x in ['binutils-2.25-GCCcore-4.9.3.eb', + 'binutils-2.26-GCCcore-4.9.3.eb']] + self.assertEqual(len(paths), 2) + self.assertEqual(paths, ref_paths) + + # search_paths location is considered by default + paths = search_easyconfigs('binutils-.*-GCCcore-4.9.3', print_result=False) + self.assertEqual(len(paths), 3) + self.assertEqual(paths[:2], ref_paths) + # last hit is the one from search_paths + self.assertTrue(os.path.samefile(paths[2], os.path.join(self.test_prefix, 'binutils-2.25-GCCcore-4.9.3.eb'))) + + paths = search_easyconfigs('8-gcc', consider_extra_paths=False, print_result=False) + ref_paths = [ + os.path.join(test_ecs, 'h', 'hwloc', 'hwloc-1.11.8-GCC-4.6.4.eb'), + os.path.join(test_ecs, 'h', 'hwloc', 'hwloc-1.11.8-GCC-6.4.0-2.28.eb'), + os.path.join(test_ecs, 'h', 'hwloc', 'hwloc-1.11.8-GCC-7.3.0-2.30.eb'), + os.path.join(test_ecs, 'h', 'hwloc', 'hwloc-1.8-gcccuda-2018a.eb'), + os.path.join(test_ecs, 'o', 'OpenBLAS', 
'OpenBLAS-0.2.8-GCC-4.8.2-LAPACK-3.4.2.eb') + ] + self.assertEqual(paths, ref_paths) + + # now do a case sensitive search + paths = search_easyconfigs('8-gcc', consider_extra_paths=False, print_result=False, case_sensitive=True) + ref_paths = [os.path.join(test_ecs, 'h', 'hwloc', 'hwloc-1.8-gcccuda-2018a.eb')] + self.assertEqual(paths, ref_paths) + + # test use of filename_only + paths = search_easyconfigs('hwloc-1.8', consider_extra_paths=False, print_result=False, filename_only=True) + self.assertEqual(paths, ['hwloc-1.8-gcccuda-2018a.eb']) + + # test use of print_result (enabled by default) + for filename_only in [None, False, True]: + self.mock_stderr(True) + self.mock_stdout(True) + kwargs = {'consider_extra_paths': False} + if filename_only is not None: + kwargs['filename_only'] = filename_only + search_easyconfigs('binutils-.*-GCCcore-4.9.3', **kwargs) + stderr, stdout = self.get_stderr(), self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + + self.assertFalse(stderr) + self.assertEqual(len(stdout.splitlines()), 2) + pattern = [] + for ec_fn in ['binutils-2.25-GCCcore-4.9.3.eb', 'binutils-2.26-GCCcore-4.9.3.eb']: + if filename_only: + path = ec_fn + else: + path = os.path.join('test', 'framework', 'easyconfigs', 'test_ecs', 'b', 'binutils', ec_fn) + pattern.append(r"^ \* .*%s$" % path) + + regex = re.compile('\n'.join(pattern), re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout)) + def suite(): """ returns all the testcases in this module """ diff --git a/test/framework/run.py b/test/framework/run.py index a5f1000e05..e7d608c7b2 100644 --- a/test/framework/run.py +++ b/test/framework/run.py @@ -268,6 +268,15 @@ def test_run_cmd_trace(self): init_config(build_options={'trace': True}) + pattern = [ + r"^ >> running command:", + r"\t\[started at: .*\]", + r"\t\[working dir: .*\]", + r"\t\[output logged in .*\]", + r"\techo hello", + r" >> command completed: exit 0, ran in .*", + ] + 
self.mock_stdout(True) self.mock_stderr(True) (out, ec) = run_cmd("echo hello") @@ -275,13 +284,24 @@ def test_run_cmd_trace(self): stderr = self.get_stderr() self.mock_stdout(False) self.mock_stderr(False) + self.assertEqual(ec, 0) + self.assertEqual(stderr, '') + regex = re.compile('\n'.join(pattern)) + self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout)) + + # also test with command that is fed input via stdin + self.mock_stdout(True) + self.mock_stderr(True) + (out, ec) = run_cmd('cat', inp='hello') + stdout = self.get_stdout() + stderr = self.get_stderr() + self.mock_stdout(False) + self.mock_stderr(False) + self.assertEqual(ec, 0) self.assertEqual(stderr, '') - pattern = "^ >> running command:\n" - pattern += "\t\[started at: .*\]\n" - pattern += "\t\[output logged in .*\]\n" - pattern += "\techo hello\n" - pattern += ' >> command completed: exit 0, ran in .*' - regex = re.compile(pattern) + pattern.insert(3, r"\t\[input: hello\]") + pattern[-2] = "\tcat" + regex = re.compile('\n'.join(pattern)) self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout)) # trace output can be disabled on a per-command basis @@ -356,11 +376,12 @@ def test_run_cmd_qa_trace(self): self.mock_stdout(False) self.mock_stderr(False) self.assertEqual(stderr, '') - pattern = "^ >> running interactive command:\n" - pattern += "\t\[started at: .*\]\n" - pattern += "\t\[output logged in .*\]\n" - pattern += "\techo \'n: \'; read n; seq 1 \$n\n" - pattern += ' >> interactive command completed: exit 0, ran in .*' + pattern = r"^ >> running interactive command:\n" + pattern += r"\t\[started at: .*\]\n" + pattern += r"\t\[working dir: .*\]\n" + pattern += r"\t\[output logged in .*\]\n" + pattern += r"\techo \'n: \'; read n; seq 1 \$n\n" + pattern += r' >> interactive command completed: exit 0, ran in .*' self.assertTrue(re.search(pattern, stdout), "Pattern '%s' found in: %s" % (pattern, stdout)) # trace output can be 
disabled on a per-command basis diff --git a/test/framework/sandbox/easybuild/easyblocks/generic/dummyextension.py b/test/framework/sandbox/easybuild/easyblocks/generic/dummyextension.py index da16d01483..af97c3f254 100644 --- a/test/framework/sandbox/easybuild/easyblocks/generic/dummyextension.py +++ b/test/framework/sandbox/easybuild/easyblocks/generic/dummyextension.py @@ -32,3 +32,11 @@ class DummyExtension(ExtensionEasyBlock): """Support for building/installing dummy extensions.""" + + def __init__(self, *args, **kwargs): + + super(DummyExtension, self).__init__(*args, **kwargs) + + # use lowercase name as default value for expected module name, and replace '-' with '_' + if 'modulename' not in self.options: + self.options['modulename'] = self.name.lower().replace('-', '_') diff --git a/test/framework/systemtools.py b/test/framework/systemtools.py index fa1f1331cb..bda6813014 100644 --- a/test/framework/systemtools.py +++ b/test/framework/systemtools.py @@ -44,8 +44,8 @@ from easybuild.tools.systemtools import CPU_VENDORS, AMD, APM, ARM, CAVIUM, IBM, INTEL from easybuild.tools.systemtools import MAX_FREQ_FP, PROC_CPUINFO_FP, PROC_MEMINFO_FP from easybuild.tools.systemtools import check_python_version, pick_dep_version -from easybuild.tools.systemtools import det_parallelism, get_avail_core_count, get_cpu_architecture, get_cpu_family -from easybuild.tools.systemtools import get_cpu_features, get_cpu_model, get_cpu_speed, get_cpu_vendor +from easybuild.tools.systemtools import det_parallelism, get_avail_core_count, get_cpu_arch_name, get_cpu_architecture +from easybuild.tools.systemtools import get_cpu_family, get_cpu_features, get_cpu_model, get_cpu_speed, get_cpu_vendor from easybuild.tools.systemtools import get_gcc_version, get_glibc_version, get_os_type, get_os_name, get_os_version from easybuild.tools.systemtools import get_platform_name, get_shared_lib_ext, get_system_info, get_total_memory @@ -338,6 +338,11 @@ def setUp(self): self.orig_platform_uname = 
st.platform.uname self.orig_get_tool_version = st.get_tool_version self.orig_sys_version_info = st.sys.version_info + self.orig_HAVE_ARCHSPEC = st.HAVE_ARCHSPEC + if hasattr(st, 'archspec_cpu_host'): + self.orig_archspec_cpu_host = st.archspec_cpu_host + else: + self.orig_archspec_cpu_host = None def tearDown(self): """Cleanup after systemtools test.""" @@ -349,6 +354,9 @@ def tearDown(self): st.platform.uname = self.orig_platform_uname st.get_tool_version = self.orig_get_tool_version st.sys.version_info = self.orig_sys_version_info + st.HAVE_ARCHSPEC = self.orig_HAVE_ARCHSPEC + if self.orig_archspec_cpu_host is not None: + st.archspec_cpu_host = self.orig_archspec_cpu_host super(SystemToolsTest, self).tearDown() def test_avail_core_count_native(self): @@ -529,6 +537,27 @@ def test_cpu_architecture(self): MACHINE_NAME = name self.assertEqual(get_cpu_architecture(), machine_names[name]) + def test_cpu_arch_name_native(self): + """Test getting CPU arch name.""" + arch_name = get_cpu_arch_name() + self.assertTrue(isinstance(arch_name, string_type)) + + def test_cpu_arch_name(self): + """Test getting CPU arch name.""" + + class MicroArch(object): + def __init__(self, name): + self.name = name + + st.HAVE_ARCHSPEC = True + st.archspec_cpu_host = lambda: MicroArch('haswell') + arch_name = get_cpu_arch_name() + self.assertEqual(arch_name, 'haswell') + + st.archspec_cpu_host = lambda: None + arch_name = get_cpu_arch_name() + self.assertEqual(arch_name, 'UNKNOWN') + def test_cpu_vendor_native(self): """Test getting CPU vendor.""" cpu_vendor = get_cpu_vendor() diff --git a/test/framework/toolchain.py b/test/framework/toolchain.py index 909bb2f070..2b0fc84634 100644 --- a/test/framework/toolchain.py +++ b/test/framework/toolchain.py @@ -948,6 +948,48 @@ def test_nosuchtoolchain(self): tc = self.get_toolchain('intel', version='1970.01') self.assertErrorRegex(EasyBuildError, "No module found for toolchain", tc.prepare) + def test_mpi_cmd_prefix(self): + """Test mpi_cmd_prefix
function.""" + self.modtool.prepend_module_path(self.test_prefix) + + tc = self.get_toolchain('gompi', version='2018a') + tc.prepare() + self.assertEqual(tc.mpi_cmd_prefix(nr_ranks=2), "mpirun -n 2") + self.assertEqual(tc.mpi_cmd_prefix(nr_ranks='2'), "mpirun -n 2") + self.assertEqual(tc.mpi_cmd_prefix(), "mpirun -n 1") + self.modtool.purge() + + self.setup_sandbox_for_intel_fftw(self.test_prefix) + tc = self.get_toolchain('intel', version='2018a') + tc.prepare() + self.assertEqual(tc.mpi_cmd_prefix(nr_ranks=2), "mpirun -n 2") + self.assertEqual(tc.mpi_cmd_prefix(nr_ranks='2'), "mpirun -n 2") + self.assertEqual(tc.mpi_cmd_prefix(), "mpirun -n 1") + self.modtool.purge() + + self.setup_sandbox_for_intel_fftw(self.test_prefix, imklver='10.2.6.038') + tc = self.get_toolchain('intel', version='2012a') + tc.prepare() + + mpi_exec_nranks_re = re.compile("^mpirun --file=.*/mpdboot -machinefile .*/nodes -np 4") + self.assertTrue(mpi_exec_nranks_re.match(tc.mpi_cmd_prefix(nr_ranks=4))) + mpi_exec_nranks_re = re.compile("^mpirun --file=.*/mpdboot -machinefile .*/nodes -np 1") + self.assertTrue(mpi_exec_nranks_re.match(tc.mpi_cmd_prefix())) + + # test specifying custom template for MPI commands + init_config(build_options={'mpi_cmd_template': "mpiexec -np %(nr_ranks)s -- %(cmd)s", 'silent': True}) + self.assertEqual(tc.mpi_cmd_prefix(nr_ranks="7"), "mpiexec -np 7 --") + self.assertEqual(tc.mpi_cmd_prefix(), "mpiexec -np 1 --") + + # check that we return None when command does not appear at the end of the template + init_config(build_options={'mpi_cmd_template': "mpiexec -np %(nr_ranks)s -- %(cmd)s option", 'silent': True}) + self.assertEqual(tc.mpi_cmd_prefix(nr_ranks="7"), None) + self.assertEqual(tc.mpi_cmd_prefix(), None) + + # template with extra spaces at the end is fine though + init_config(build_options={'mpi_cmd_template': "mpirun -np %(nr_ranks)s %(cmd)s ", 'silent': True}) + self.assertEqual(tc.mpi_cmd_prefix(), "mpirun -np 1") + def test_mpi_cmd_for(self): """Test
mpi_cmd_for function.""" self.modtool.prepend_module_path(self.test_prefix) @@ -974,6 +1016,17 @@ def test_mpi_cmd_for(self): init_config(build_options={'mpi_cmd_template': "mpiexec -np %(nr_ranks)s -- %(cmd)s", 'silent': True}) self.assertEqual(tc.mpi_cmd_for('test123', '7'), "mpiexec -np 7 -- test123") + # check whether expected error is raised when a template with missing keys is used; + # %(ranks)s should be %(nr_ranks)s + init_config(build_options={'mpi_cmd_template': "mpiexec -np %(ranks)s -- %(cmd)s", 'silent': True}) + error_pattern = \ + r"Missing templates in mpi-cmd-template value 'mpiexec -np %\(ranks\)s -- %\(cmd\)s': %\(nr_ranks\)s" + self.assertErrorRegex(EasyBuildError, error_pattern, tc.mpi_cmd_for, 'test', 1) + + init_config(build_options={'mpi_cmd_template': "mpirun %(foo)s -np %(nr_ranks)s %(cmd)s", 'silent': True}) + error_pattern = "Failed to complete MPI cmd template .* with .*: KeyError 'foo'" + self.assertErrorRegex(EasyBuildError, error_pattern, tc.mpi_cmd_for, 'test', 1) + def test_prepare_deps(self): """Test preparing for a toolchain when dependencies are involved.""" tc = self.get_toolchain('GCC', version='6.4.0-2.28') diff --git a/test/framework/toy_build.py b/test/framework/toy_build.py index ef3e5d10e1..1570504205 100644 --- a/test/framework/toy_build.py +++ b/test/framework/toy_build.py @@ -34,6 +34,7 @@ import os import re import shutil +import signal import stat import sys import tempfile @@ -745,7 +746,7 @@ def test_toy_hierarchical(self): '--try-toolchain=foss,2018a', # This test was created for the regex substitution of toolchains, to trigger this (rather than subtoolchain # resolution) we must add an additional build option - '--try-amend=parallel=1', + '--disable-map-toolchains', ] self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True) @@ -1231,6 +1232,7 @@ def test_toy_module_fulltxt(self): r'', r'conflict\("toy"\)', r'', + r'prepend_path\("CMAKE_PREFIX_PATH", root\)', 
r'prepend_path\("LD_LIBRARY_PATH", pathJoin\(root, "lib"\)\)', r'prepend_path\("LIBRARY_PATH", pathJoin\(root, "lib"\)\)', r'prepend_path\("PATH", pathJoin\(root, "bin"\)\)', @@ -1268,6 +1270,7 @@ def test_toy_module_fulltxt(self): r'', r'conflict toy', r'', + r'prepend-path CMAKE_PREFIX_PATH \$root', r'prepend-path LD_LIBRARY_PATH \$root/lib', r'prepend-path LIBRARY_PATH \$root/lib', r'prepend-path PATH \$root/bin', @@ -1413,7 +1416,7 @@ def test_module_only(self): self.assertTrue(os.path.exists(os.path.join(self.test_installpath, 'software', 'toy', '0.0-deps', 'bin'))) modtxt = read_file(toy_mod) self.assertTrue(re.search("set root %s" % prefix, modtxt)) - self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software'))), 1) + self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software'))), 2) self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software', 'toy'))), 1) # install (only) additional module under a hierarchical MNS @@ -1428,7 +1431,7 @@ def test_module_only(self): # existing install is reused modtxt2 = read_file(toy_core_mod) self.assertTrue(re.search("set root %s" % prefix, modtxt2)) - self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software'))), 2) + self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software'))), 3) self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software', 'toy'))), 1) # make sure load statements for dependencies are included @@ -1439,7 +1442,7 @@ def test_module_only(self): os.remove(toy_core_mod) # test installing (only) additional module in Lua syntax (if Lmod is available) - lmod_abspath = which('lmod') + lmod_abspath = os.environ.get('LMOD_CMD') or which('lmod') if lmod_abspath is not None: args = common_args[:-1] + [ '--allow-modules-tool-mismatch', @@ -1453,7 +1456,7 @@ def test_module_only(self): # existing install is reused modtxt3 = read_file(toy_mod + '.lua') self.assertTrue(re.search('local root = "%s"' % 
prefix, modtxt3)) - self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software'))), 2) + self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software'))), 3) self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software', 'toy'))), 1) # make sure load statements for dependencies are included @@ -2055,7 +2058,7 @@ def test_toy_modaltsoftname(self): self.assertTrue(os.path.exists(os.path.join(modules_path, 'yot', yot_name))) # only subdirectories for software should be created - self.assertEqual(os.listdir(software_path), ['toy']) + self.assertEqual(sorted(os.listdir(software_path)), sorted(['toy', '.locks'])) self.assertEqual(sorted(os.listdir(os.path.join(software_path, 'toy'))), ['0.0-one', '0.0-two']) # only subdirectories for modules with alternative names should be created @@ -2076,17 +2079,24 @@ def test_toy_build_trace(self): self.assertEqual(stderr, '') patterns = [ - "^ >> installation prefix: .*/software/toy/0\.0$", - "^== fetching files\.\.\.\n >> sources:\n >> .*/toy-0\.0\.tar\.gz \[SHA256: 44332000.*\]$", - "^ >> applying patch toy-0\.0_fix-silly-typo-in-printf-statement\.patch$", - "^ >> running command:\n\t\[started at: .*\]\n\t\[output logged in .*\]\n\tgcc toy.c -o toy\n" + - " >> command completed: exit 0, ran in .*", - '^' + '\n'.join([ - "== sanity checking\.\.\.", - " >> file 'bin/yot' or 'bin/toy' found: OK", - " >> \(non-empty\) directory 'bin' found: OK", - ]) + '$', - "^== creating module\.\.\.\n >> generating module file @ .*/modules/all/toy/0\.0(?:\.lua)?$", + r"^ >> installation prefix: .*/software/toy/0\.0$", + r"^== fetching files\.\.\.\n >> sources:\n >> .*/toy-0\.0\.tar\.gz \[SHA256: 44332000.*\]$", + r"^ >> applying patch toy-0\.0_fix-silly-typo-in-printf-statement\.patch$", + r'\n'.join([ + r"^ >> running command:", + r"\t\[started at: .*\]", + r"\t\[working dir: .*\]", + r"\t\[output logged in .*\]", + r"\tgcc toy.c -o toy\n" + r'', + ]), + r" >> command completed: exit 0, ran in 
.*", + r'^' + r'\n'.join([ + r"== sanity checking\.\.\.", + r" >> file 'bin/yot' or 'bin/toy' found: OK", + r" >> \(non-empty\) directory 'bin' found: OK", + ]) + r'$', + r"^== creating module\.\.\.\n >> generating module file @ .*/modules/all/toy/0\.0(?:\.lua)?$", ] for pattern in patterns: regex = re.compile(pattern, re.M) @@ -2368,6 +2378,10 @@ def test_fix_shebang(self): test_ec_txt = '\n'.join([ toy_ec_txt, "postinstallcmds = [" + # copy of bin/toy to use in fix_python_shebang_for and fix_perl_shebang_for + " 'cp -a %(installdir)s/bin/toy %(installdir)s/bin/toy.python',", + " 'cp -a %(installdir)s/bin/toy %(installdir)s/bin/toy.perl',", + # hardcoded path to bin/python " 'echo \"#!/usr/bin/python\\n# test\" > %(installdir)s/bin/t1.py',", # hardcoded path to bin/python3.6 @@ -2378,6 +2392,12 @@ def test_fix_shebang(self): " 'echo \"#! /usr/bin/env python3\\n# test\" > %(installdir)s/bin/t4.py',", # 'env python3.6' " 'echo \"#!/usr/bin/env python3.6\\n# test\" > %(installdir)s/bin/t5.py',", + # shebang with space, should strip the space + " 'echo \"#! /usr/bin/env python\\n# test\" > %(installdir)s/bin/t6.py',", + # no shebang python + " 'echo \"# test\" > %(installdir)s/bin/t7.py',", + # shebang bash + " 'echo \"#!/usr/bin/env bash\\n# test\" > %(installdir)s/bin/b1.sh',", # tests for perl shebang # hardcoded path to bin/perl @@ -2390,19 +2410,34 @@ def test_fix_shebang(self): " 'echo \"#!/usr/bin/perl -w\\n# test\" > %(installdir)s/bin/t4.pl',", # space after #! + 'env perl5' " 'echo \"#!/usr/bin/env perl5\\n# test\" > %(installdir)s/bin/t5.pl',", + # shebang with space, should strip the space + " 'echo \"#! 
/usr/bin/env perl\\n# test\" > %(installdir)s/bin/t6.pl',", + # no shebang perl + " 'echo \"# test\" > %(installdir)s/bin/t7.pl',", + # shebang bash + " 'echo \"#!/usr/bin/env bash\\n# test\" > %(installdir)s/bin/b2.sh',", "]", - "fix_python_shebang_for = ['bin/t1.py', 'bin/*.py', 'nosuchdir/*.py', 'bin/toy']", - "fix_perl_shebang_for = 'bin/*.pl'", + "fix_python_shebang_for = ['bin/t1.py', 'bin/*.py', 'nosuchdir/*.py', 'bin/toy.python', 'bin/b1.sh']", + "fix_perl_shebang_for = ['bin/*.pl', 'bin/b2.sh', 'bin/toy.perl']", ]) write_file(test_ec, test_ec_txt) self.test_toy_build(ec_file=test_ec, raise_error=True) toy_bindir = os.path.join(self.test_installpath, 'software', 'toy', '0.0', 'bin') + # bin/toy and bin/toy2 should *not* be patched, since they're binary files + toy_txt = read_file(os.path.join(toy_bindir, 'toy'), mode='rb') + for fn in ['toy.perl', 'toy.python']: + fn_txt = read_file(os.path.join(toy_bindir, fn), mode='rb') + # no shebang added + self.assertFalse(fn_txt.startswith(b"#!/")) + # exact same file as original binary (untouched) + self.assertEqual(toy_txt, fn_txt) + # no re.M, this should match at start of file! py_shebang_regex = re.compile(r'^#!/usr/bin/env python\n# test$') - for pybin in ['t1.py', 't2.py', 't3.py', 't4.py', 't5.py']: + for pybin in ['t1.py', 't2.py', 't3.py', 't4.py', 't5.py', 't6.py', 't7.py']: pybin_path = os.path.join(toy_bindir, pybin) pybin_txt = read_file(pybin_path) self.assertTrue(py_shebang_regex.match(pybin_txt), @@ -2410,12 +2445,20 @@ def test_fix_shebang(self): # no re.M, this should match at start of file! 
perl_shebang_regex = re.compile(r'^#!/usr/bin/env perl\n# test$') - for perlbin in ['t1.pl', 't2.pl', 't3.pl', 't4.pl', 't5.pl']: + for perlbin in ['t1.pl', 't2.pl', 't3.pl', 't4.pl', 't5.pl', 't6.pl', 't7.pl']: perlbin_path = os.path.join(toy_bindir, perlbin) perlbin_txt = read_file(perlbin_path) self.assertTrue(perl_shebang_regex.match(perlbin_txt), "Pattern '%s' found in %s: %s" % (perl_shebang_regex.pattern, perlbin_path, perlbin_txt)) + # There are 2 bash files which shouldn't be influenced by fix_shebang + bash_shebang_regex = re.compile(r'^#!/usr/bin/env bash\n# test$') + for bashbin in ['b1.sh', 'b2.sh']: + bashbin_path = os.path.join(toy_bindir, bashbin) + bashbin_txt = read_file(bashbin_path) + self.assertTrue(bash_shebang_regex.match(bashbin_txt), + "Pattern '%s' found in %s: %s" % (bash_shebang_regex.pattern, bashbin_path, bashbin_txt)) + def test_toy_system_toolchain_alias(self): """Test use of 'system' toolchain alias.""" toy_ec = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb') @@ -2474,6 +2517,95 @@ def test_toy_ghost_installdir(self): self.assertFalse(os.path.exists(toy_installdir)) + def test_toy_build_lock(self): + """Test toy installation when a lock is already in place.""" + + locks_dir = os.path.join(self.test_installpath, 'software', '.locks') + toy_installdir = os.path.join(self.test_installpath, 'software', 'toy', '0.0') + toy_lock_fn = toy_installdir.replace(os.path.sep, '_') + '.lock' + + toy_lock_path = os.path.join(locks_dir, toy_lock_fn) + mkdir(toy_lock_path, parents=True) + + error_pattern = "Lock .*_software_toy_0.0.lock already exists, aborting!" 
+ self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, raise_error=True, verbose=False) + + locks_dir = os.path.join(self.test_prefix, 'locks') + + # no lock in place, so installation proceeds as normal + extra_args = ['--locks-dir=%s' % locks_dir] + self.test_toy_build(extra_args=extra_args, verify=True, raise_error=True) + + # put lock in place in custom locks dir, try again + toy_lock_path = os.path.join(locks_dir, toy_lock_fn) + mkdir(toy_lock_path, parents=True) + self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, + extra_args=extra_args, raise_error=True, verbose=False) + + # also test use of --ignore-locks + self.test_toy_build(extra_args=extra_args + ['--ignore-locks'], verify=True, raise_error=True) + + # define a context manager that remove a lock after a while, so we can check the use of --wait-for-lock + class remove_lock_after: + def __init__(self, seconds, lock_fp): + self.seconds = seconds + self.lock_fp = lock_fp + + def remove_lock(self, *args): + remove_dir(self.lock_fp) + + def __enter__(self): + signal.signal(signal.SIGALRM, self.remove_lock) + signal.alarm(self.seconds) + + def __exit__(self, type, value, traceback): + pass + + # wait for lock to be removed, with 1 second interval of checking + extra_args.append('--wait-on-lock=1') + + wait_regex = re.compile("^== lock .*_software_toy_0.0.lock exists, waiting 1 seconds", re.M) + ok_regex = re.compile("^== COMPLETED: Installation ended successfully", re.M) + + self.assertTrue(os.path.exists(toy_lock_path)) + + # use context manager to remove lock after 3 seconds + with remove_lock_after(3, toy_lock_path): + self.mock_stderr(True) + self.mock_stdout(True) + self.test_toy_build(extra_args=extra_args, verify=False, raise_error=True, testing=False) + stderr, stdout = self.get_stderr(), self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + + self.assertEqual(stderr, '') + + wait_matches = wait_regex.findall(stdout) + # we can't rely on an 
 exact number of 'waiting' messages, so let's go with a range... + self.assertTrue(len(wait_matches) in range(2, 5)) + + self.assertTrue(ok_regex.search(stdout), "Pattern '%s' should be found in: %s" % (ok_regex.pattern, stdout)) + + # when there is no lock in place, --wait-on-lock has no impact + self.assertFalse(os.path.exists(toy_lock_path)) + self.mock_stderr(True) + self.mock_stdout(True) + self.test_toy_build(extra_args=extra_args, verify=False, raise_error=True, testing=False) + stderr, stdout = self.get_stderr(), self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + + self.assertEqual(stderr, '') + self.assertTrue(ok_regex.search(stdout), "Pattern '%s' should be found in: %s" % (ok_regex.pattern, stdout)) + self.assertFalse(wait_regex.search(stdout), "Pattern '%s' should not be found in: %s" % (wait_regex.pattern, stdout)) + + # check for clean error on creation of lock + extra_args = ['--locks-dir=/'] + error_pattern = r"Failed to create lock /.*_software_toy_0.0.lock:.* " + error_pattern += r"(Read-only file system|Permission denied)" + self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, + extra_args=extra_args, raise_error=True, verbose=False) + def suite(): """ return all the tests in this file """ diff --git a/test/framework/tweak.py b/test/framework/tweak.py index 1b1ee630a2..0797e76de5 100644 --- a/test/framework/tweak.py +++ b/test/framework/tweak.py @@ -36,8 +36,10 @@ from easybuild.framework.easyconfig.parser import EasyConfigParser from easybuild.framework.easyconfig.tweak import find_matching_easyconfigs, obtain_ec_for, pick_version, tweak_one from easybuild.framework.easyconfig.tweak import check_capability_mapping, match_minimum_tc_specs -from easybuild.framework.easyconfig.tweak import get_dep_tree_of_toolchain -from easybuild.framework.easyconfig.tweak import map_toolchain_hierarchies, map_easyconfig_to_target_tc_hierarchy +from easybuild.framework.easyconfig.tweak import get_dep_tree_of_toolchain, map_common_versionsuffixes +from
easybuild.framework.easyconfig.tweak import get_matching_easyconfig_candidates, map_toolchain_hierarchies +from easybuild.framework.easyconfig.tweak import find_potential_version_mappings +from easybuild.framework.easyconfig.tweak import map_easyconfig_to_target_tc_hierarchy from easybuild.tools.build_log import EasyBuildError from easybuild.tools.config import module_classes from easybuild.tools.filetools import change_dir, write_file @@ -305,16 +307,90 @@ def test_map_toolchain_hierarchies(self): } self.assertEqual(map_toolchain_hierarchies(gcc_binutils_tc, iccifort_binutils_tc, self.modtool), expected) - def test_map_easyconfig_to_target_tc_hierarchy(self): - """Test mapping of easyconfig to target hierarchy""" + def test_get_matching_easyconfig_candidates(self): + """Test searching for easyconfig candidates based on a stub and toolchain""" test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs') init_config(build_options={ - 'robot_path': test_easyconfigs, + 'valid_module_classes': module_classes(), + 'robot_path': [test_easyconfigs], + }) + toolchain = {'name': 'GCC', 'version': '4.9.3-2.26'} + paths, toolchain_suff = get_matching_easyconfig_candidates('gzip-', toolchain) + expected_toolchain_suff = '-GCC-4.9.3-2.26' + self.assertEqual(toolchain_suff, expected_toolchain_suff) + expected_paths = [os.path.join(test_easyconfigs, 'g', 'gzip', 'gzip-1.4' + expected_toolchain_suff + '.eb')] + self.assertEqual(paths, expected_paths) + + paths, toolchain_stub = get_matching_easyconfig_candidates('nosuchmatch', toolchain) + self.assertEqual(paths, []) + self.assertEqual(toolchain_stub, expected_toolchain_suff) + + def test_map_common_versionsuffixes(self): + """Test mapping between two toolchain hierarchies""" + test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs') + init_config(build_options={ + 'robot_path': [test_easyconfigs], 'silent': True, 'valid_module_classes': 
module_classes(), }) get_toolchain_hierarchy.clear() + gcc_binutils_tc = {'name': 'GCC', 'version': '4.9.3-2.26'} + iccifort_binutils_tc = {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'} + + toolchain_mapping = map_toolchain_hierarchies(iccifort_binutils_tc, gcc_binutils_tc, self.modtool) + possible_mappings = map_common_versionsuffixes('binutils', iccifort_binutils_tc, toolchain_mapping) + self.assertEqual(possible_mappings, {'-binutils-2.25': '-binutils-2.26'}) + + # Make sure we only map upwards, here it's gzip 1.4 in gcc and 1.6 in iccifort + possible_mappings = map_common_versionsuffixes('gzip', iccifort_binutils_tc, toolchain_mapping) + self.assertEqual(possible_mappings, {}) + + # newer gzip is picked up other way around (GCC -> iccifort) + toolchain_mapping = map_toolchain_hierarchies(gcc_binutils_tc, iccifort_binutils_tc, self.modtool) + possible_mappings = map_common_versionsuffixes('gzip', gcc_binutils_tc, toolchain_mapping) + self.assertEqual(possible_mappings, {'-gzip-1.4': '-gzip-1.6'}) + + def test_find_potential_version_mappings(self): + """Test ability to find potential version mappings of a dependency for a given toolchain mapping""" + test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs') + init_config(build_options={ + 'robot_path': [test_easyconfigs], + 'silent': True, + 'valid_module_classes': module_classes(), + }) + get_toolchain_hierarchy.clear() + + gcc_binutils_tc = {'name': 'GCC', 'version': '4.9.3-2.26'} + iccifort_binutils_tc = {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'} + # The below mapping includes a binutils mapping (2.26 to 2.25) + tc_mapping = map_toolchain_hierarchies(gcc_binutils_tc, iccifort_binutils_tc, self.modtool) + ec_spec = os.path.join(test_easyconfigs, 'h', 'hwloc', 'hwloc-1.6.2-GCC-4.9.3-2.26.eb') + parsed_ec = process_easyconfig(ec_spec)[0] + gzip_dep = [dep for dep in parsed_ec['ec']['dependencies'] if dep['name'] == 'gzip'][0] + 
self.assertEqual(gzip_dep['full_mod_name'], 'gzip/1.4-GCC-4.9.3-2.26') + + potential_versions = find_potential_version_mappings(gzip_dep, tc_mapping) + self.assertEqual(len(potential_versions), 1) + # Should see version 1.6 of gzip with iccifort toolchain + expected = { + 'path': os.path.join(test_easyconfigs, 'g', 'gzip', 'gzip-1.6-iccifort-2016.1.150-GCC-4.9.3-2.25.eb'), + 'toolchain': {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'}, + 'version': '1.6', + } + self.assertEqual(potential_versions[0], expected) + + def test_map_easyconfig_to_target_tc_hierarchy(self): + """Test mapping of easyconfig to target hierarchy""" + test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs') + build_options = { + 'robot_path': [test_easyconfigs], + 'silent': True, + 'valid_module_classes': module_classes(), + } + init_config(build_options=build_options) + get_toolchain_hierarchy.clear() + gcc_binutils_tc = {'name': 'GCC', 'version': '4.9.3-2.26'} iccifort_binutils_tc = {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'} # The below mapping includes a binutils mapping (2.26 to 2.25) @@ -331,6 +407,81 @@ def test_map_easyconfig_to_target_tc_hierarchy(self): self.assertTrue(key in tweaked_dict['builddependencies'][0] and value == tweaked_dict['builddependencies'][0][key]) + # Now test the case where we try to update the dependencies + init_config(build_options=build_options) + get_toolchain_hierarchy.clear() + tweaked_spec = map_easyconfig_to_target_tc_hierarchy(ec_spec, tc_mapping, update_dep_versions=True) + tweaked_ec = process_easyconfig(tweaked_spec)[0] + tweaked_dict = tweaked_ec['ec'].asdict() + # First check the mapped toolchain + key, value = 'toolchain', iccifort_binutils_tc + self.assertTrue(key in tweaked_dict and value == tweaked_dict[key]) + # Also check that binutils has been mapped + for key, value in {'name': 'binutils', 'version': '2.25', 'versionsuffix': ''}.items(): + self.assertTrue( + key in 
tweaked_dict['builddependencies'][0] and value == tweaked_dict['builddependencies'][0][key] + ) + # Also check that the gzip dependency was upgraded + for key, value in {'name': 'gzip', 'version': '1.6', 'versionsuffix': ''}.items(): + self.assertTrue(key in tweaked_dict['dependencies'][0] and value == tweaked_dict['dependencies'][0][key]) + + # Make sure there are checksums for our next test + self.assertTrue(tweaked_dict['checksums']) + + # Test the case where we also update the software version at the same time + init_config(build_options=build_options) + get_toolchain_hierarchy.clear() + new_version = '1.x.3' + tweaked_spec = map_easyconfig_to_target_tc_hierarchy(ec_spec, + tc_mapping, + update_build_specs={'version': new_version}, + update_dep_versions=True) + tweaked_ec = process_easyconfig(tweaked_spec)[0] + tweaked_dict = tweaked_ec['ec'].asdict() + # First check the mapped toolchain + key, value = 'toolchain', iccifort_binutils_tc + self.assertTrue(key in tweaked_dict and value == tweaked_dict[key]) + # Also check that binutils has been mapped + for key, value in {'name': 'binutils', 'version': '2.25', 'versionsuffix': ''}.items(): + self.assertTrue( + key in tweaked_dict['builddependencies'][0] and value == tweaked_dict['builddependencies'][0][key] + ) + # Also check that the gzip dependency was upgraded + for key, value in {'name': 'gzip', 'version': '1.6', 'versionsuffix': ''}.items(): + self.assertTrue(key in tweaked_dict['dependencies'][0] and value == tweaked_dict['dependencies'][0][key]) + + # Finally check that the version was upgraded + key, value = 'version', new_version + self.assertTrue(key in tweaked_dict and value == tweaked_dict[key]) + # and that the checksum was removed + self.assertFalse(tweaked_dict['checksums']) + + # Check that if we update a software version, it also updates the version if the software appears in an + # extension list (like for a PythonBundle) + ec_spec = os.path.join(test_easyconfigs, 't', 'toy', 
'toy-0.0-gompi-2018a-test.eb') + # Create the trivial toolchain mapping + toolchain = {'name': 'gompi', 'version': '2018a'} + tc_mapping = map_toolchain_hierarchies(toolchain, toolchain, self.modtool) + # Update the software version + init_config(build_options=build_options) + get_toolchain_hierarchy.clear() + new_version = '1.x.3' + tweaked_spec = map_easyconfig_to_target_tc_hierarchy(ec_spec, + tc_mapping, + update_build_specs={'version': new_version}, + update_dep_versions=False) + tweaked_ec = process_easyconfig(tweaked_spec)[0] + tweaked_dict = tweaked_ec['ec'].asdict() + extensions = tweaked_dict['exts_list'] + # check one extension with the same name exists and that the version has been updated + hit_extension = 0 + for extension in extensions: + if isinstance(extension, tuple) and extension[0] == 'toy': + self.assertEqual(extension[1], new_version) + # Make sure checksum has been purged + self.assertFalse('checksums' in extension[2]) + hit_extension += 1 + self.assertEqual(hit_extension, 1, "Should only have updated one extension") def suite(): """ return all the tests in this file """ diff --git a/test/framework/type_checking.py b/test/framework/type_checking.py index 3dd60dcbd0..b1247832c5 100644 --- a/test/framework/type_checking.py +++ b/test/framework/type_checking.py @@ -658,6 +658,8 @@ def test_to_checksums(self): ['be662daa971a640e40be5c804d9d7d10', ('adler32', '0x998410035'), ('crc32', '0x1553842328'), ('md5', 'be662daa971a640e40be5c804d9d7d10'), ('sha1', 'f618096c52244539d0e89867405f573fdb0b55b0'), ('size', 273)], + # None values should not be filtered out, but left in place + [None, 'fa618be8435447a017fd1bf2c7ae922d0428056cfc7449f7a8641edf76b48265', None], ] for checksums in test_inputs: self.assertEqual(to_checksums(checksums), checksums)