From 63afd0d2cf976c34f3212d91a881fe85e16540b0 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Thu, 31 Oct 2019 15:53:54 -0700 Subject: trilinos: temporarily constrain netcdf@:4.7.1 (#13526) remove this when issues are resolved with `trilinos^netcdf@4.7.2` --- var/spack/repos/builtin/packages/trilinos/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index 47e1fb1936..3a456ee093 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -297,7 +297,7 @@ class Trilinos(CMakePackage): # MPI related dependencies depends_on('mpi') - depends_on('netcdf+mpi', when="~pnetcdf") + depends_on('netcdf@:4.7.1+mpi', when="~pnetcdf") depends_on('netcdf+mpi+parallel-netcdf', when="+pnetcdf@master,12.12.1:") depends_on('parallel-netcdf', when="+pnetcdf@master,12.12.1:") depends_on('parmetis', when='+metis') -- cgit v1.2.3-70-g09d2 From 94de86aeb800bb2b7af0087a8e3fda12ec95dc79 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Thu, 31 Oct 2019 23:56:14 -0700 Subject: elpa: prefer 2016.05.004 until sse/avx/avx2 issues are resolved (#13530) - configuration fails on newer versions when enabling sse/avx/avx2 - prefer an older version that will build with these features --- var/spack/repos/builtin/packages/elpa/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/elpa/package.py b/var/spack/repos/builtin/packages/elpa/package.py index 5633594c8f..699431d2b5 100644 --- a/var/spack/repos/builtin/packages/elpa/package.py +++ b/var/spack/repos/builtin/packages/elpa/package.py @@ -21,7 +21,7 @@ class Elpa(AutotoolsPackage): version('2017.05.003', sha256='bccd49ce35a323bd734b17642aed8f2588fea4cc78ee8133d88554753bc3bf1b') version('2017.05.002', sha256='568b71024c094d667b5cbb23045ad197ed5434071152ac608dae490ace5eb0aa') version('2016.11.001.pre', sha256='69b67f0f6faaa2b3b5fd848127b632be32771636d2ad04583c5269d550956f92') - version('2016.05.004', sha256='08c59dc9da458bab856f489d779152e5506e04f0d4b8d6dcf114ca5fbbe46c58') + version('2016.05.004', sha256='08c59dc9da458bab856f489d779152e5506e04f0d4b8d6dcf114ca5fbbe46c58', preferred=True) version('2016.05.003', sha256='c8da50c987351514e61491e14390cdea4bdbf5b09045261991876ed5b433fca4') version('2015.11.001', sha256='c0761a92a31c08a4009c9688c85fc3fc8fde9b6ce05e514c3e1587cf045e9eba') -- cgit v1.2.3-70-g09d2 From 835df4b2e4fe2d3a04dd756ddaa1b2f15f3ebb59 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Tue, 29 Oct 2019 13:51:48 -0700 Subject: syaml_int type should use int.__repr__ rather than str.__repr__ (#13487) --- lib/spack/spack/util/spack_yaml.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/util/spack_yaml.py b/lib/spack/spack/util/spack_yaml.py index 264d5414de..1d92d59ad2 100644 --- a/lib/spack/spack/util/spack_yaml.py +++ b/lib/spack/spack/util/spack_yaml.py @@ -47,7 +47,7 @@ class syaml_str(str): class syaml_int(int): - __repr__ = str.__repr__ + __repr__ = int.__repr__ #: mapping from syaml type -> primitive type -- cgit v1.2.3-70-g09d2 From b727f922a4cbe74d1d4820a9666ca43770dd3ba5 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Wed, 30 Oct 2019 17:16:13 -0500 Subject: cpu: fix clang flags for generic x86_64 (#13491) * cpu: differentiate flags used for pristine LLVM vs. 
Apple's version --- lib/spack/llnl/util/cpu/microarchitectures.json | 17 ++++++++++++----- lib/spack/spack/test/architecture.py | 7 ++++--- 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/lib/spack/llnl/util/cpu/microarchitectures.json b/lib/spack/llnl/util/cpu/microarchitectures.json index 23cff4bf9a..bb3b4db21a 100644 --- a/lib/spack/llnl/util/cpu/microarchitectures.json +++ b/lib/spack/llnl/util/cpu/microarchitectures.json @@ -61,11 +61,18 @@ "flags": "-march={name} -mtune={name}" } ], - "clang": { - "versions": ":", - "family": "x86-64", - "flags": "-march={family} -mcpu=generic" - }, + "clang": [ + { + "versions": "0.0.0-apple:", + "family": "x86-64", + "flags": "-march={family}" + }, + { + "versions": ":", + "family": "x86-64", + "flags": "-march={family} -mcpu=generic" + } + ], "intel": { "versions": ":", "name": "pentium4", diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py index 30aba65214..0a37ef9558 100644 --- a/lib/spack/spack/test/architecture.py +++ b/lib/spack/spack/test/architecture.py @@ -176,8 +176,9 @@ def test_arch_spec_container_semantic(item, architecture_str): ('gcc@4.7.2', 'ivybridge', '-march=core-avx-i -mtune=core-avx-i'), # Check mixed toolchains ('clang@8.0.0', 'broadwell', ''), + ('clang@3.5', 'x86_64', '-march=x86-64 -mcpu=generic'), # Check clang compilers with 'apple' suffix - ('clang@9.1.0-apple', 'x86_64', '-march=x86-64 -mcpu=generic') + ('clang@9.1.0-apple', 'x86_64', '-march=x86-64') ]) @pytest.mark.filterwarnings("ignore:microarchitecture specific") def test_optimization_flags( @@ -199,9 +200,9 @@ def test_optimization_flags( (spack.spec.CompilerSpec('gcc@4.4.0-special'), '9.2.0', 'icelake', '-march=icelake-client -mtune=icelake-client'), # Check that the special case for Apple's clang is treated correctly - # i.e. it won't try to dtect the version again + # i.e. it won't try to detect the version again (spack.spec.CompilerSpec('clang@9.1.0-apple'), None, 'x86_64', - '-march=x86-64 -mcpu=generic'), + '-march=x86-64'), ]) def test_optimization_flags_with_custom_versions( compiler, real_version, target_str, expected_flags, monkeypatch, config -- cgit v1.2.3-70-g09d2 From 944d7b3d62214b112b7e642999b8247291bb1bbc Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 30 Oct 2019 17:25:46 +0100 Subject: cuda: fix conflict statements for x86-64 targets (#13472) * cuda: fix conflict statements for x86-64 targets fixes #13462 This build system mixin was not updated after the support for specific targets has been merged. 
* Updated the version range of cuda that conflicts with gcc@8: * Updated the version range of cuda that conflicts with gcc@8: for ppc64le * Relaxed conflicts for version > 10.1 * Updated versions in conflicts Co-Authored-By: Axel Huebl --- lib/spack/spack/build_systems/cuda.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/build_systems/cuda.py b/lib/spack/spack/build_systems/cuda.py index 436bc30729..b75807fd06 100644 --- a/lib/spack/spack/build_systems/cuda.py +++ b/lib/spack/spack/build_systems/cuda.py @@ -56,11 +56,12 @@ class CudaPackage(PackageBase): # Linux x86_64 compiler conflicts from here: # https://gist.github.com/ax3l/9489132 - arch_platform = ' arch=x86_64 platform=linux' + arch_platform = ' target=x86_64: platform=linux' conflicts('%gcc@5:', when='+cuda ^cuda@:7.5' + arch_platform) conflicts('%gcc@6:', when='+cuda ^cuda@:8' + arch_platform) conflicts('%gcc@7:', when='+cuda ^cuda@:9.1' + arch_platform) - conflicts('%gcc@8:', when='+cuda ^cuda@10.0.130' + arch_platform) + conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130' + arch_platform) + conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243' + arch_platform) conflicts('%pgi@:14.8', when='+cuda ^cuda@:7.0.27' + arch_platform) conflicts('%pgi@:15.3,15.5:', when='+cuda ^cuda@7.5' + arch_platform) conflicts('%pgi@:16.2,16.0:16.3', when='+cuda ^cuda@8' + arch_platform) @@ -82,10 +83,11 @@ class CudaPackage(PackageBase): # https://docs.nvidia.com/cuda/archive/9.0/cuda-installation-guide-linux/index.html # https://docs.nvidia.com/cuda/archive/8.0/cuda-installation-guide-linux/index.html - arch_platform = ' arch=ppc64le platform=linux' + arch_platform = ' target=ppc64le: platform=linux' # information prior to CUDA 9 difficult to find conflicts('%gcc@6:', when='+cuda ^cuda@:9' + arch_platform) - conflicts('%gcc@8:', when='+cuda ^cuda@10.0.130' + arch_platform) + conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130' + arch_platform) + conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243' + arch_platform) conflicts('%pgi', when='+cuda ^cuda@:8' + arch_platform) conflicts('%pgi@:16', when='+cuda ^cuda@:9.1.185' + arch_platform) conflicts('%pgi@:17', when='+cuda ^cuda@:10' + arch_platform) -- cgit v1.2.3-70-g09d2 From 30c9609c4e04e5768ad3ec22d738c73954864e27 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Wed, 30 Oct 2019 18:47:48 -0500 Subject: Documentation: Database.query methods share docstrings (#13515) Currently, query arguments in the Spack core are documented on the Database._query method, where the functionality is defined. For users of the spack python command, this makes the python builtin method help less than ideally useful, as help(spack.store.db.query) and help(spack.store.db.query_local) do not show relevant information. This PR updates the doc attributes for the Database.query and Database.query_local arguments to mirror everything after the first line of the Database._query docstring. --- lib/spack/spack/database.py | 94 +++++++++++++++++++++++++-------------------- 1 file changed, 53 insertions(+), 41 deletions(-) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index a1748fc585..e6e82f9803 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -206,6 +206,50 @@ class ForbiddenLock(object): "Cannot access attribute '{0}' of lock".format(name)) +_query_docstring = """ + + Args: + query_spec: queries iterate through specs in the database and + return those that satisfy the supplied ``query_spec``. 
If + query_spec is `any`, This will match all specs in the + database. If it is a spec, we'll evaluate + ``spec.satisfies(query_spec)`` + + known (bool or any, optional): Specs that are "known" are those + for which Spack can locate a ``package.py`` file -- i.e., + Spack "knows" how to install them. Specs that are unknown may + represent packages that existed in a previous version of + Spack, but have since either changed their name or + been removed + + installed (bool or any, or InstallStatus or iterable of + InstallStatus, optional): if ``True``, includes only installed + specs in the search; if ``False`` only missing specs, and if + ``any``, all specs in database. If an InstallStatus or iterable + of InstallStatus, returns specs whose install status + (installed, deprecated, or missing) matches (one of) the + InstallStatus. (default: True) + + explicit (bool or any, optional): A spec that was installed + following a specific user request is marked as explicit. If + instead it was pulled-in as a dependency of a user requested + spec it's considered implicit. + + start_date (datetime, optional): filters the query discarding + specs that have been installed before ``start_date``. + + end_date (datetime, optional): filters the query discarding + specs that have been installed after ``end_date``. + + hashes (container): list or set of hashes that we can use to + restrict the search + + Returns: + list of specs that match the query + + """ + + class Database(object): """Per-process lock objects for each install prefix.""" @@ -1158,48 +1202,8 @@ class Database(object): end_date=None, hashes=None ): - """Run a query on the database + """Run a query on the database.""" - Args: - query_spec: queries iterate through specs in the database and - return those that satisfy the supplied ``query_spec``. If - query_spec is `any`, This will match all specs in the - database. If it is a spec, we'll evaluate - ``spec.satisfies(query_spec)`` - - known (bool or any, optional): Specs that are "known" are those - for which Spack can locate a ``package.py`` file -- i.e., - Spack "knows" how to install them. Specs that are unknown may - represent packages that existed in a previous version of - Spack, but have since either changed their name or - been removed - - installed (bool or any, or InstallStatus or iterable of - InstallStatus, optional): if ``True``, includes only installed - specs in the search; if ``False`` only missing specs, and if - ``any``, all specs in database. If an InstallStatus or iterable - of InstallStatus, returns specs whose install status - (installed, deprecated, or missing) matches (one of) the - InstallStatus. (default: True) - - explicit (bool or any, optional): A spec that was installed - following a specific user request is marked as explicit. If - instead it was pulled-in as a dependency of a user requested - spec it's considered implicit. - - start_date (datetime, optional): filters the query discarding - specs that have been installed before ``start_date``. - - end_date (datetime, optional): filters the query discarding - specs that have been installed after ``end_date``. - - hashes (container): list or set of hashes that we can use to - restrict the search - - Returns: - list of specs that match the query - - """ # TODO: Specs are a lot like queries. Should there be a # TODO: wildcard spec object, and should specs have attributes # TODO: like installed and known that can be queried? 
Or are @@ -1246,11 +1250,17 @@ class Database(object): return results + _query.__doc__ += _query_docstring + def query_local(self, *args, **kwargs): + """Query only the local Spack database.""" with self.read_transaction(): return sorted(self._query(*args, **kwargs)) + query_local.__doc__ += _query_docstring + def query(self, *args, **kwargs): + """Query the Spack database including all upstream databases.""" upstream_results = [] for upstream_db in self.upstream_dbs: # queries for upstream DBs need to *not* lock - we may not @@ -1265,6 +1275,8 @@ class Database(object): return sorted(results) + query.__doc__ += _query_docstring + def query_one(self, query_spec, known=any, installed=True): """Query for exactly one spec that matches the query spec. -- cgit v1.2.3-70-g09d2 From 338a532e07357c93fe9972dcca60e197f94ec20b Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Thu, 31 Oct 2019 14:20:46 -0500 Subject: Travis CI: Test Python 3.8 (#13347) * Travis CI: Test Python 3.8 * Fix use of deprecated cgi.escape method * Fix version comparison * Fix flake8 F811 change in Python 3.8 * Make flake8 happy * Use Python 3.8 for all test categories --- .travis.yml | 26 +++++++++++++++----------- lib/spack/docs/getting_started.rst | 2 +- lib/spack/spack/cmd/flake8.py | 12 ++++++++++++ lib/spack/spack/cmd/list.py | 10 +++++++--- 4 files changed, 35 insertions(+), 15 deletions(-) diff --git a/.travis.yml b/.travis.yml index a79a5126b1..f8f0778cae 100644 --- a/.travis.yml +++ b/.travis.yml @@ -18,12 +18,12 @@ jobs: fast_finish: true include: - stage: 'style checks' - python: '3.7' + python: '3.8' os: linux language: python env: TEST_SUITE=flake8 # Shell integration with module files - - python: '3.7' + - python: '3.8' os: linux language: python env: [ TEST_SUITE=bootstrap ] @@ -46,10 +46,14 @@ jobs: language: python env: TEST_SUITE=unit - python: '3.7' + os: linux + language: python + env: TEST_SUITE=unit + - python: '3.8' os: linux language: python env: [ TEST_SUITE=unit, COVERAGE=true ] - - python: '3.7' + - python: '3.8' os: linux language: python env: TEST_SUITE=doc @@ -64,41 +68,41 @@ jobs: language: python env: [ TEST_SUITE=build, 'SPEC=mpich' ] # astyle (MakefilePackage) - - python: '3.7' + - python: '3.8' os: linux language: python env: [ TEST_SUITE=build, 'SPEC=astyle' ] # tut (WafPackage) - - python: '3.7' + - python: '3.8' os: linux language: python env: [ TEST_SUITE=build, 'SPEC=tut' ] # py-setuptools (PythonPackage) - - python: '3.7' + - python: '3.8' os: linux language: python env: [ TEST_SUITE=build, 'SPEC=py-setuptools' ] # perl-dbi (PerlPackage) -# - python: '2.7' +# - python: '3.8' # os: linux # language: python # env: [ TEST_SUITE=build, 'SPEC=perl-dbi' ] # openjpeg (CMakePackage + external cmake) - - python: '3.7' + - python: '3.8' os: linux language: python env: [ TEST_SUITE=build, 'SPEC=openjpeg' ] # r-rcpp (RPackage + external R) - - python: '3.7' + - python: '3.8' os: linux language: python env: [ TEST_SUITE=build, 'SPEC=r-rcpp' ] # mpich (AutotoolsPackage) - - python: '3.7' + - python: '3.8' os: linux language: python env: [ TEST_SUITE=build, 'SPEC=mpich' ] - - python: '3.6' + - python: '3.8' stage: 'docker build' os: linux language: python diff --git a/lib/spack/docs/getting_started.rst b/lib/spack/docs/getting_started.rst index b7ba386bb2..7f3a34b62b 100644 --- a/lib/spack/docs/getting_started.rst +++ b/lib/spack/docs/getting_started.rst @@ -16,7 +16,7 @@ Prerequisites Spack has the following minimum requirements, which must be installed before Spack is run: -#. 
Python 2 (2.6 or 2.7) or 3 (3.4 - 3.7) to run Spack +#. Python 2 (2.6 or 2.7) or 3 (3.5 - 3.8) to run Spack #. A C/C++ compiler for building #. The ``make`` executable for building #. The ``git`` and ``curl`` commands for fetching diff --git a/lib/spack/spack/cmd/flake8.py b/lib/spack/spack/cmd/flake8.py index 61f7dd567f..cc5b168c4b 100644 --- a/lib/spack/spack/cmd/flake8.py +++ b/lib/spack/spack/cmd/flake8.py @@ -178,6 +178,12 @@ def add_pattern_exemptions(line, codes): def filter_file(source, dest, output=False): """Filter a single file through all the patterns in pattern_exemptions.""" + + # Prior to Python 3.8, `noqa: F811` needed to be placed on the `@when` line + # Starting with Python 3.8, it must be placed on the `def` line + # https://gitlab.com/pycqa/flake8/issues/583 + ignore_f811_on_previous_line = False + with open(source) as infile: parent = os.path.dirname(dest) mkdirp(parent) @@ -197,6 +203,12 @@ def filter_file(source, dest, output=False): line_errors.append(code) break + if 'F811' in line_errors: + ignore_f811_on_previous_line = True + elif ignore_f811_on_previous_line: + line_errors.append('F811') + ignore_f811_on_previous_line = False + if line_errors: line = add_pattern_exemptions(line, line_errors) diff --git a/lib/spack/spack/cmd/list.py b/lib/spack/spack/cmd/list.py index 570dcc7e27..fe6983ce74 100644 --- a/lib/spack/spack/cmd/list.py +++ b/lib/spack/spack/cmd/list.py @@ -7,7 +7,6 @@ from __future__ import print_function from __future__ import division import argparse -import cgi import fnmatch import os import re @@ -23,6 +22,11 @@ import spack.repo import spack.cmd.common.arguments as arguments from spack.version import VersionList +if sys.version_info > (3, 1): + from html import escape +else: + from cgi import escape + description = "list and search available packages" section = "basic" level = "short" @@ -217,7 +221,7 @@ def html(pkg_names, out): out.write('
<dt>Homepage:</dt>\n') out.write(('<dd><ul class="first last simple">' '<li>' '<a class="reference external" href="%s">%s</a>' - '</li>\n') % (pkg.homepage, cgi.escape(pkg.homepage))) + '</li>\n') % (pkg.homepage, escape(pkg.homepage, True))) out.write('</ul></dd>\n') out.write('<dt>Spack package:</dt>\n') @@ -249,7 +253,7 @@ def html(pkg_names, out): out.write('<dt>
Description:</dt>\n') out.write('<dd>\n') - out.write(cgi.escape(pkg.format_doc(indent=2))) + out.write(escape(pkg.format_doc(indent=2), True)) out.write('\n') out.write('</dd>
\n') out.write('\n') -- cgit v1.2.3-70-g09d2 From 1cc69e1ce09b7f93bf9f36fcd0ef343909e4044c Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 1 Nov 2019 11:36:28 +0100 Subject: targets: print a warning message before downgrading (#13513) * Make package preferences a soft failure for targets, instead of a hard failure. * Added unit tests for preferences expressed via packages.yaml --- lib/spack/spack/concretize.py | 59 +++++++++++++++++++++++++------------- lib/spack/spack/test/concretize.py | 23 +++++++++++---- 2 files changed, 57 insertions(+), 25 deletions(-) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index a31529e0ea..417afed35b 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -281,25 +281,7 @@ class Concretizer(object): else: # To get default platform, consider package prefs if PackagePrefs.has_preferred_targets(spec.name): - target_prefs = PackagePrefs(spec.name, 'target') - target_specs = [spack.spec.Spec('target=%s' % tname) - for tname in cpu.targets] - - def tspec_filter(s): - # Filter target specs by whether the architecture - # family is the current machine type. This ensures - # we only consider x86_64 targets when on an - # x86_64 machine, etc. This may need to change to - # enable setting cross compiling as a default - target = cpu.targets[str(s.architecture.target)] - arch_family_name = target.family.name - return arch_family_name == platform.machine() - - # Sort filtered targets by package prefs - target_specs = list(filter(tspec_filter, target_specs)) - target_specs.sort(key=target_prefs) - - new_target = target_specs[0].architecture.target + new_target = self.target_from_package_preferences(spec) else: new_target = new_plat.target('default_target') @@ -310,6 +292,33 @@ class Concretizer(object): spec.architecture = new_arch return spec_changed + def target_from_package_preferences(self, spec): + """Returns the preferred target from the package preferences if + there's any. + + Args: + spec: abstract spec to be concretized + """ + target_prefs = PackagePrefs(spec.name, 'target') + target_specs = [spack.spec.Spec('target=%s' % tname) + for tname in cpu.targets] + + def tspec_filter(s): + # Filter target specs by whether the architecture + # family is the current machine type. This ensures + # we only consider x86_64 targets when on an + # x86_64 machine, etc. This may need to change to + # enable setting cross compiling as a default + target = cpu.targets[str(s.architecture.target)] + arch_family_name = target.family.name + return arch_family_name == platform.machine() + + # Sort filtered targets by package prefs + target_specs = list(filter(tspec_filter, target_specs)) + target_specs.sort(key=target_prefs) + new_target = target_specs[0].architecture.target + return new_target + def concretize_variants(self, spec): """If the spec already has variants filled in, return. 
Otherwise, add the user preferences from packages.yaml or the default variants from @@ -526,7 +535,12 @@ class Concretizer(object): current_platform = spack.architecture.get_platform( spec.architecture.platform ) - if current_target != current_platform.target('default_target') or \ + + default_target = current_platform.target('default_target') + if PackagePrefs.has_preferred_targets(spec.name): + default_target = self.target_from_package_preferences(spec) + + if current_target != default_target or \ (self.abstract_spec.architecture is not None and self.abstract_spec.architecture.target is not None): return False @@ -544,6 +558,11 @@ class Concretizer(object): continue if candidate is not None: + msg = ('{0.name}@{0.version} cannot build optimized ' + 'binaries for "{1}". Using best target possible: ' + '"{2}"') + msg = msg.format(spec.compiler, current_target, candidate) + tty.warn(msg) spec.architecture.target = candidate return True else: diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 592b515dbf..e0774909f4 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -11,6 +11,7 @@ import spack.concretize import spack.repo from spack.concretize import find_spec, NoValidVersionError +from spack.package_prefs import PackagePrefs from spack.spec import Spec, CompilerSpec from spack.spec import ConflictsInSpecError, SpecError from spack.version import ver @@ -83,13 +84,25 @@ def spec(request): @pytest.fixture(params=[ - 'haswell', 'broadwell', 'skylake', 'icelake' + # Mocking the host detection + 'haswell', 'broadwell', 'skylake', 'icelake', + # Using preferred targets from packages.yaml + 'icelake-preference', 'cannonlake-preference' ]) def current_host(request, monkeypatch): - target = llnl.util.cpu.targets[request.param] - monkeypatch.setattr(llnl.util.cpu, 'host', lambda: target) - monkeypatch.setattr(spack.platforms.test.Test, 'default', request.param) - return target + # is_preference is not empty if we want to supply the + # preferred target via packages.yaml + cpu, _, is_preference = request.param.partition('-') + target = llnl.util.cpu.targets[cpu] + if not is_preference: + monkeypatch.setattr(llnl.util.cpu, 'host', lambda: target) + monkeypatch.setattr(spack.platforms.test.Test, 'default', cpu) + yield target + else: + # There's a cache that needs to be cleared for unit tests + PackagePrefs._packages_config_cache = None + with spack.config.override('packages:all', {'target': [cpu]}): + yield target @pytest.mark.usefixtures('config', 'mock_packages') -- cgit v1.2.3-70-g09d2 From eb286bb80f6e4422f373a6a16cffa6342f6d8043 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 1 Nov 2019 11:38:11 +0100 Subject: Specs with quoted flags containing spaces are parsed correctly (#13521) --- lib/spack/spack/cmd/__init__.py | 5 ++++- lib/spack/spack/test/cmd/common/arguments.py | 18 ++++++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index 9dd6dc4c6e..fe0ed0c224 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -24,6 +24,7 @@ import spack.paths import spack.spec import spack.store import spack.util.spack_json as sjson +import spack.util.string from spack.error import SpackError @@ -134,7 +135,9 @@ def parse_specs(args, **kwargs): tests = kwargs.get('tests', False) try: - sargs = args if isinstance(args, six.string_types) else ' '.join(args) + sargs = args + if not isinstance(args, 
six.string_types): + sargs = ' '.join(spack.util.string.quote(args)) specs = spack.spec.parse(sargs) for spec in specs: if concretize: diff --git a/lib/spack/spack/test/cmd/common/arguments.py b/lib/spack/spack/test/cmd/common/arguments.py index 2d32342c1b..82ae495611 100644 --- a/lib/spack/spack/test/cmd/common/arguments.py +++ b/lib/spack/spack/test/cmd/common/arguments.py @@ -9,6 +9,7 @@ import multiprocessing import pytest +import spack.cmd import spack.cmd.common.arguments as arguments import spack.config @@ -62,3 +63,20 @@ def test_negative_integers_not_allowed_for_parallel_jobs(parser): parser.parse_args(['-j', '-2']) assert 'expected a positive integer' in str(exc_info.value) + + +@pytest.mark.parametrize('specs,expected_variants,unexpected_variants', [ + (['coreutils', 'cflags=-O3 -g'], [], ['g']), + (['coreutils', 'cflags=-O3', '-g'], ['g'], []), +]) +@pytest.mark.regression('12951') +def test_parse_spec_flags_with_spaces( + specs, expected_variants, unexpected_variants +): + spec_list = spack.cmd.parse_specs(specs) + assert len(spec_list) == 1 + + s = spec_list.pop() + + assert all(x not in s.variants for x in unexpected_variants) + assert all(x in s.variants for x in expected_variants) -- cgit v1.2.3-70-g09d2 From 2a9d6b9fbf5a2694dccd77fd75bb6517f6430dc1 Mon Sep 17 00:00:00 2001 From: Gregory Lee Date: Fri, 1 Nov 2019 03:39:45 -0700 Subject: sbang: use utf-8 for encoding when patching (#13490) This fixes a UnicodeDecodeError in the sbang patching function. --- lib/spack/spack/hooks/sbang.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/hooks/sbang.py b/lib/spack/spack/hooks/sbang.py index 6987815bc8..422f240ce8 100644 --- a/lib/spack/spack/hooks/sbang.py +++ b/lib/spack/spack/hooks/sbang.py @@ -6,6 +6,7 @@ import os import stat import re +import sys import llnl.util.tty as tty @@ -33,8 +34,12 @@ def shebang_too_long(path): def filter_shebang(path): """Adds a second shebang line, using sbang, at the beginning of a file.""" - with open(path, 'r') as original_file: + with open(path, 'rb') as original_file: original = original_file.read() + if sys.version_info >= (2, 7): + original = original.decode(encoding='UTF-8') + else: + original = original.decode('UTF-8') # This line will be prepended to file new_sbang_line = '#!/bin/bash %s/bin/sbang\n' % spack.paths.prefix @@ -61,9 +66,13 @@ def filter_shebang(path): saved_mode = st.st_mode os.chmod(path, saved_mode | stat.S_IWRITE) - with open(path, 'w') as new_file: - new_file.write(new_sbang_line) - new_file.write(original) + with open(path, 'wb') as new_file: + if sys.version_info >= (2, 7): + new_file.write(new_sbang_line.encode(encoding='UTF-8')) + new_file.write(original.encode(encoding='UTF-8')) + else: + new_file.write(new_sbang_line.encode('UTF-8')) + new_file.write(original.encode('UTF-8')) # Restore original permissions. 
if saved_mode is not None: -- cgit v1.2.3-70-g09d2 From 01a0d554f595bc43bc5295d28248b992a53aa5f7 Mon Sep 17 00:00:00 2001 From: Omar Padron Date: Fri, 1 Nov 2019 06:42:43 -0400 Subject: bugfix: spack.util.url.join() now handles absolute paths correctly (#13488) * fix issue where spack.util.url.join() failed to correctly handle absolute path components * add url util tests --- lib/spack/spack/test/util/util_url.py | 303 ++++++++++++++++++++++++++++++++++ lib/spack/spack/util/url.py | 77 ++++++++- lib/spack/spack/util/web.py | 1 - 3 files changed, 379 insertions(+), 2 deletions(-) create mode 100644 lib/spack/spack/test/util/util_url.py diff --git a/lib/spack/spack/test/util/util_url.py b/lib/spack/spack/test/util/util_url.py new file mode 100644 index 0000000000..24b40ac63c --- /dev/null +++ b/lib/spack/spack/test/util/util_url.py @@ -0,0 +1,303 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +"""Test Spack's URL handling utility functions.""" +import os +import os.path +import spack.util.url as url_util + + +def test_url_parse(): + parsed = url_util.parse('/path/to/resource') + assert(parsed.scheme == 'file') + assert(parsed.netloc == '') + assert(parsed.path == '/path/to/resource') + + parsed = url_util.parse('/path/to/resource', scheme='fake') + assert(parsed.scheme == 'fake') + assert(parsed.netloc == '') + assert(parsed.path == '/path/to/resource') + + parsed = url_util.parse('file:///path/to/resource') + assert(parsed.scheme == 'file') + assert(parsed.netloc == '') + assert(parsed.path == '/path/to/resource') + + parsed = url_util.parse('file:///path/to/resource', scheme='fake') + assert(parsed.scheme == 'file') + assert(parsed.netloc == '') + assert(parsed.path == '/path/to/resource') + + parsed = url_util.parse('file://path/to/resource') + assert(parsed.scheme == 'file') + assert(parsed.netloc == '') + expected = os.path.abspath(os.path.join('path', 'to', 'resource')) + assert(parsed.path == expected) + + parsed = url_util.parse('https://path/to/resource') + assert(parsed.scheme == 'https') + assert(parsed.netloc == 'path') + assert(parsed.path == '/to/resource') + + spack_root = os.path.abspath(os.environ['SPACK_ROOT']) + parsed = url_util.parse('$spack') + assert(parsed.scheme == 'file') + assert(parsed.netloc == '') + assert(parsed.path == spack_root) + + parsed = url_util.parse('/a/b/c/$spack') + assert(parsed.scheme == 'file') + assert(parsed.netloc == '') + expected = os.path.abspath(os.path.join( + '/', 'a', 'b', 'c', './' + spack_root)) + assert(parsed.path == expected) + + +def test_url_local_file_path(): + spack_root = os.path.abspath(os.environ['SPACK_ROOT']) + + lfp = url_util.local_file_path('/a/b/c.txt') + assert(lfp == '/a/b/c.txt') + + lfp = url_util.local_file_path('file:///a/b/c.txt') + assert(lfp == '/a/b/c.txt') + + lfp = url_util.local_file_path('file://a/b/c.txt') + expected = os.path.abspath(os.path.join('a', 'b', 'c.txt')) + assert(lfp == expected) + + lfp = url_util.local_file_path('$spack/a/b/c.txt') + expected = os.path.abspath(os.path.join(spack_root, 'a', 'b', 'c.txt')) + assert(lfp == expected) + + lfp = url_util.local_file_path('file:///$spack/a/b/c.txt') + expected = os.path.abspath(os.path.join(spack_root, 'a', 'b', 'c.txt')) + assert(lfp == expected) + + lfp = url_util.local_file_path('file://$spack/a/b/c.txt') + expected = os.path.abspath(os.path.join(spack_root, 'a', 'b', 'c.txt')) + 
assert(lfp == expected) + + # not a file:// URL - so no local file path + lfp = url_util.local_file_path('http:///a/b/c.txt') + assert(lfp is None) + + lfp = url_util.local_file_path('http://a/b/c.txt') + assert(lfp is None) + + lfp = url_util.local_file_path('http:///$spack/a/b/c.txt') + assert(lfp is None) + + lfp = url_util.local_file_path('http://$spack/a/b/c.txt') + assert(lfp is None) + + +def test_url_join_local_paths(): + # Resolve local link against page URL + + # wrong: + assert( + url_util.join( + 's3://bucket/index.html', + '../other-bucket/document.txt') + == + 's3://bucket/other-bucket/document.txt') + + # correct - need to specify resolve_href=True: + assert( + url_util.join( + 's3://bucket/index.html', + '../other-bucket/document.txt', + resolve_href=True) + == + 's3://other-bucket/document.txt') + + # same as above: make sure several components are joined together correctly + assert( + url_util.join( + # with resolve_href=True, first arg is the base url; can not be + # broken up + 's3://bucket/index.html', + + # with resolve_href=True, remaining arguments are the components of + # the local href that needs to be resolved + '..', 'other-bucket', 'document.txt', + resolve_href=True) + == + 's3://other-bucket/document.txt') + + # Append local path components to prefix URL + + # wrong: + assert( + url_util.join( + 'https://mirror.spack.io/build_cache', + 'my-package', + resolve_href=True) + == + 'https://mirror.spack.io/my-package') + + # correct - Need to specify resolve_href=False: + assert( + url_util.join( + 'https://mirror.spack.io/build_cache', + 'my-package', + resolve_href=False) + == + 'https://mirror.spack.io/build_cache/my-package') + + # same as above; make sure resolve_href=False is default + assert( + url_util.join( + 'https://mirror.spack.io/build_cache', + 'my-package') + == + 'https://mirror.spack.io/build_cache/my-package') + + # same as above: make sure several components are joined together correctly + assert( + url_util.join( + # with resolve_href=False, first arg is just a prefix. No + # resolution is done. So, there should be no difference between + # join('/a/b/c', 'd/e'), + # join('/a/b', 'c', 'd/e'), + # join('/a', 'b/c', 'd', 'e'), etc. + 'https://mirror.spack.io', + 'build_cache', + 'my-package') + == + 'https://mirror.spack.io/build_cache/my-package') + + # file:// URL path components are *NOT* canonicalized + spack_root = os.path.abspath(os.environ['SPACK_ROOT']) + + join_result = url_util.join('/a/b/c', '$spack') + assert(join_result == 'file:///a/b/c/$spack') # not canonicalized + format_result = url_util.format(join_result) + # canoncalize by hand + expected = url_util.format(os.path.abspath(os.path.join( + '/', 'a', 'b', 'c', '.' + spack_root))) + assert(format_result == expected) + + # see test_url_join_absolute_paths() for more on absolute path components + join_result = url_util.join('/a/b/c', '/$spack') + assert(join_result == 'file:///$spack') # not canonicalized + format_result = url_util.format(join_result) + expected = url_util.format(spack_root) + assert(format_result == expected) + + # For s3:// URLs, the "netloc" (bucket) is considered part of the path. + # Make sure join() can cross bucket boundaries in this case. 
+ args = ['s3://bucket/a/b', 'new-bucket', 'c'] + assert(url_util.join(*args) == 's3://bucket/a/b/new-bucket/c') + + args.insert(1, '..') + assert(url_util.join(*args) == 's3://bucket/a/new-bucket/c') + + args.insert(1, '..') + assert(url_util.join(*args) == 's3://bucket/new-bucket/c') + + # new-bucket is now the "netloc" (bucket name) + args.insert(1, '..') + assert(url_util.join(*args) == 's3://new-bucket/c') + + +def test_url_join_absolute_paths(): + # Handling absolute path components is a little tricky. To this end, we + # distinguish "absolute path components", from the more-familiar concept of + # "absolute paths" as they are understood for local filesystem paths. + # + # - All absolute paths are absolute path components. Joining a URL with + # these components has the effect of completely replacing the path of the + # URL with the absolute path. These components do not specify a URL + # scheme, so the scheme of the URL procuced when joining them depend on + # those provided by components that came before it (file:// assumed if no + # such scheme is provided). + + # For eaxmple: + p = '/path/to/resource' + # ...is an absolute path + + # http:// URL + assert( + url_util.join('http://example.com/a/b/c', p) + == 'http://example.com/path/to/resource') + + # s3:// URL + # also notice how the netloc is treated as part of the path for s3:// URLs + assert( + url_util.join('s3://example.com/a/b/c', p) + == 's3://path/to/resource') + + # - URL components that specify a scheme are always absolute path + # components. Joining a base URL with these components effectively + # discards the base URL and "resets" the joining logic starting at the + # component in question and using it as the new base URL. + + # For eaxmple: + p = 'http://example.com/path/to' + # ...is an http:// URL + + join_result = url_util.join(p, 'resource') + assert(join_result == 'http://example.com/path/to/resource') + + # works as if everything before the http:// URL was left out + assert( + url_util.join( + 'literally', 'does', 'not', 'matter', + p, 'resource') + == join_result) + + # It's important to keep in mind that this logic applies even if the + # component's path is not an absolute path! + + # For eaxmple: + p = './d' + # ...is *NOT* an absolute path + # ...is also *NOT* an absolute path component + + u = 'file://./d' + # ...is a URL + # The path of this URL is *NOT* an absolute path + # HOWEVER, the URL, itself, *is* an absolute path component + + # (We just need... + cwd = os.getcwd() + # ...to work out what resource it points to) + + # So, even though parse() assumes "file://" URL, the scheme is still + # significant in URL path components passed to join(), even if the base + # is a file:// URL. + + path_join_result = 'file:///a/b/c/d' + assert(url_util.join('/a/b/c', p) == path_join_result) + assert(url_util.join('file:///a/b/c', p) == path_join_result) + + url_join_result = 'file://{CWD}/d'.format(CWD=cwd) + assert(url_util.join('/a/b/c', u) == url_join_result) + assert(url_util.join('file:///a/b/c', u) == url_join_result) + + # Finally, resolve_href should have no effect for how absolute path + # components are handled because local hrefs can not be absolute path + # components. 
+ args = ['s3://does', 'not', 'matter', + 'http://example.com', + 'also', 'does', 'not', 'matter', + '/path'] + + expected = 'http://example.com/path' + assert(url_util.join(*args, resolve_href=True) == expected) + assert(url_util.join(*args, resolve_href=False) == expected) + + # resolve_href only matters for the local path components at the end of the + # argument list. + args[-1] = '/path/to/page' + args.extend(('..', '..', 'resource')) + + assert(url_util.join(*args, resolve_href=True) == + 'http://example.com/resource') + + assert(url_util.join(*args, resolve_href=False) == + 'http://example.com/path/resource') diff --git a/lib/spack/spack/util/url.py b/lib/spack/spack/util/url.py index 6b2786f244..7ac12e7b81 100644 --- a/lib/spack/spack/util/url.py +++ b/lib/spack/spack/util/url.py @@ -51,7 +51,7 @@ def local_file_path(url): def parse(url, scheme='file'): - """Parse a mirror url. + """Parse a url. For file:// URLs, the netloc and path components are concatenated and passed through spack.util.path.canoncalize_path(). @@ -105,6 +105,9 @@ def join(base_url, path, *extra, **kwargs): If resolve_href is False (default), then the URL path components are joined as in os.path.join(). + Note: file:// URL path components are not canonicalized as part of this + operation. To canonicalize, pass the joined url to format(). + Examples: base_url = 's3://bucket/index.html' body = fetch_body(prefix) @@ -127,7 +130,79 @@ def join(base_url, path, *extra, **kwargs): # correct - simply append additional URL path components spack.util.url.join(prefix, 'my-package', resolve_href=False) # default 'https://mirror.spack.io/build_cache/my-package' + + # For canonicalizing file:// URLs, take care to explicitly differentiate + # between absolute and relative join components. + + # '$spack' is not an absolute path component + join_result = spack.util.url.join('/a/b/c', '$spack') ; join_result + 'file:///a/b/c/$spack' + spack.util.url.format(join_result) + 'file:///a/b/c/opt/spack' + + # '/$spack' *is* an absolute path component + join_result = spack.util.url.join('/a/b/c', '/$spack') ; join_result + 'file:///$spack' + spack.util.url.format(join_result) + 'file:///opt/spack' """ + paths = [ + (x if isinstance(x, string_types) else x.geturl()) + for x in itertools.chain((base_url, path), extra)] + n = len(paths) + last_abs_component = None + scheme = None + for i in range(n - 1, -1, -1): + obj = urllib_parse.urlparse( + paths[i], scheme=None, allow_fragments=False) + + scheme = obj.scheme + + # in either case the component is absolute + if scheme is not None or obj.path.startswith('/'): + if scheme is None: + # Without a scheme, we have to go back looking for the + # next-last component that specifies a scheme. + for j in range(i - 1, -1, -1): + obj = urllib_parse.urlparse( + paths[j], scheme=None, allow_fragments=False) + + if obj.scheme: + paths[i] = '{SM}://{NL}{PATH}'.format( + SM=obj.scheme, + NL=( + (obj.netloc + '/') + if obj.scheme != 's3' else ''), + PATH=paths[i][1:]) + break + + last_abs_component = i + break + + if last_abs_component is not None: + paths = paths[last_abs_component:] + if len(paths) == 1: + result = urllib_parse.urlparse( + paths[0], scheme='file', allow_fragments=False) + + # another subtlety: If the last argument to join() is an absolute + # file:// URL component with a relative path, the relative path + # needs to be resolved. 
+ if result.scheme == 'file' and result.netloc: + result = urllib_parse.ParseResult( + scheme=result.scheme, + netloc='', + path=os.path.abspath(result.netloc + result.path), + params=result.params, + query=result.query, + fragment=None) + + return result.geturl() + + return _join(*paths, **kwargs) + + +def _join(base_url, path, *extra, **kwargs): base_url = parse(base_url) resolve_href = kwargs.get('resolve_href', False) diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index abf549cc89..f2afe769c6 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -477,7 +477,6 @@ def spider(root, depth=0): performance over a sequential fetch. """ - root = url_util.parse(root) pages, links = _spider(root, set(), root, 0, depth, False) return pages, links -- cgit v1.2.3-70-g09d2 From d670765b977666ab865d90aecfb2c7f4a3321391 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 1 Nov 2019 03:52:04 -0700 Subject: version bump: 0.13.1 --- lib/spack/spack/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index 3c2e922680..37af6630cf 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -5,7 +5,7 @@ #: major, minor, patch version for Spack, in a tuple -spack_version_info = (0, 13, 0) +spack_version_info = (0, 13, 1) #: String containing Spack version joined with .'s spack_version = '.'.join(str(v) for v in spack_version_info) -- cgit v1.2.3-70-g09d2 From 7cdb241f8087dea9815feaebc101518343822f65 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Sun, 3 Nov 2019 17:46:41 -0600 Subject: environments: only write when necessary (#13546) This changes Spack environments so that the YAML file associated with the environment is *only* written when necessary (i.e., if it is changed *by spack*). The lockfile is still written out as before. There is a larger question here of which part of Spack should be responsible for setting defaults in config files, and how we can get rid of empty lists and data structures currently cluttering files like `compilers.yaml`. But that probably requires a rework of the default-setting validator in `spack.config`, as well as the code that uses `spack.config`. This will at least help for `spack.yaml`. --- lib/spack/spack/cmd/release_jobs.py | 4 +- lib/spack/spack/environment.py | 76 ++++++++++++++++++++++++++----------- lib/spack/spack/schema/env.py | 1 + lib/spack/spack/test/cmd/env.py | 10 +++++ 4 files changed, 66 insertions(+), 25 deletions(-) diff --git a/lib/spack/spack/cmd/release_jobs.py b/lib/spack/spack/cmd/release_jobs.py index c5acc4ee2d..44fcc8511a 100644 --- a/lib/spack/spack/cmd/release_jobs.py +++ b/lib/spack/spack/cmd/release_jobs.py @@ -409,9 +409,7 @@ def find_matching_config(spec, ci_mappings): def release_jobs(parser, args): env = ev.get_env(args, 'release-jobs', required=True) - # FIXME: What's the difference between one that opens with 'spack' - # and one that opens with 'env'? This will only handle the former. - yaml_root = env.yaml['spack'] + yaml_root = ev.config_dict(env.yaml) if 'gitlab-ci' not in yaml_root: tty.die('Environment yaml does not have "gitlab-ci" section') diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index b6b1814819..2507d1a254 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -70,8 +70,7 @@ default_manifest_yaml = """\ # configuration settings. 
spack: # add package specs to the `specs` list - specs: - - + specs: [] view: true """ #: regex for validating enviroment names @@ -391,20 +390,31 @@ def all_environments(): def validate(data, filename=None): + # validating changes data by adding defaults. Return validated data + validate_data = copy.deepcopy(data) + # HACK to fully copy ruamel CommentedMap that doesn't provide copy method + import ruamel.yaml as yaml + setattr( + validate_data, + yaml.comments.Comment.attrib, + getattr(data, yaml.comments.Comment.attrib, yaml.comments.Comment()) + ) + import jsonschema try: - spack.schema.Validator(spack.schema.env.schema).validate(data) + spack.schema.Validator(spack.schema.env.schema).validate(validate_data) except jsonschema.ValidationError as e: raise spack.config.ConfigFormatError( e, data, filename, e.instance.lc.line + 1) + return validate_data def _read_yaml(str_or_file): """Read YAML from a file for round-trip parsing.""" data = syaml.load_config(str_or_file) filename = getattr(str_or_file, 'name', None) - validate(data, filename) - return data + default_data = validate(data, filename) + return (data, default_data) def _write_yaml(data, str_or_file): @@ -444,6 +454,13 @@ class ViewDescriptor(object): for e in self.exclude) self.link = link + def __eq__(self, other): + return all([self.root == other.root, + self.projections == other.projections, + self.select == other.select, + self.exclude == other.exclude, + self.link == other.link]) + def to_dict(self): ret = {'root': self.root} if self.projections: @@ -540,7 +557,7 @@ class Environment(object): self._read_lockfile(f) self._set_user_specs_from_lockfile() else: - self._read_manifest(f) + self._read_manifest(f, raw_yaml=default_manifest_yaml) else: default_manifest = not os.path.exists(self.manifest_path) if default_manifest: @@ -573,9 +590,13 @@ class Environment(object): # If with_view is None, then defer to the view settings determined by # the manifest file - def _read_manifest(self, f): + def _read_manifest(self, f, raw_yaml=None): """Read manifest file and set up user specs.""" - self.yaml = _read_yaml(f) + if raw_yaml: + _, self.yaml = _read_yaml(f) + self.raw_yaml, _ = _read_yaml(raw_yaml) + else: + self.raw_yaml, self.yaml = _read_yaml(f) self.spec_lists = OrderedDict() @@ -1311,6 +1332,9 @@ class Environment(object): # ensure path in var/spack/environments fs.mkdirp(self.path) + yaml_dict = config_dict(self.yaml) + raw_yaml_dict = config_dict(self.raw_yaml) + if self.specs_by_hash: # ensure the prefix/.env directory exists fs.mkdirp(self.env_subdir_path) @@ -1345,8 +1369,7 @@ class Environment(object): # The primary list is handled differently continue - conf = config_dict(self.yaml) - active_yaml_lists = [l for l in conf.get('definitions', []) + active_yaml_lists = [l for l in yaml_dict.get('definitions', []) if name in l and _eval_conditional(l.get('when', 'True'))] @@ -1370,10 +1393,11 @@ class Environment(object): # put the new user specs in the YAML. # This can be done directly because there can't be multiple definitions # nor when clauses for `specs` list. 
- yaml_spec_list = config_dict(self.yaml).setdefault(user_speclist_name, - []) + yaml_spec_list = yaml_dict.setdefault(user_speclist_name, + []) yaml_spec_list[:] = self.user_specs.yaml_list + # Construct YAML representation of view default_name = default_view_name if self.views and len(self.views) == 1 and default_name in self.views: path = self.default_view.root @@ -1390,18 +1414,26 @@ class Environment(object): else: view = False - yaml_dict = config_dict(self.yaml) - if view is not True: - # The default case is to keep an active view inside of the - # Spack environment directory. To avoid cluttering the config, - # we omit the setting in this case. - yaml_dict['view'] = view - elif 'view' in yaml_dict: - del yaml_dict['view'] + yaml_dict['view'] = view + + # Remove yaml sections that are shadowing defaults + # construct garbage path to ensure we don't find a manifest by accident + bare_env = Environment(os.path.join(self.manifest_path, 'garbage'), + with_view=self.view_path_default) + keys_present = list(yaml_dict.keys()) + for key in keys_present: + if yaml_dict[key] == config_dict(bare_env.yaml).get(key, None): + if key not in raw_yaml_dict: + del yaml_dict[key] # if all that worked, write out the manifest file at the top level - with fs.write_tmp_and_move(self.manifest_path) as f: - _write_yaml(self.yaml, f) + # Only actually write if it has changed or was never written + changed = self.yaml != self.raw_yaml + written = os.path.exists(self.manifest_path) + if changed or not written: + self.raw_yaml = copy.deepcopy(self.yaml) + with fs.write_tmp_and_move(self.manifest_path) as f: + _write_yaml(self.yaml, f) # TODO: for operations that just add to the env (install etc.) this # could just call update_view diff --git a/lib/spack/spack/schema/env.py b/lib/spack/spack/schema/env.py index 0af877185a..dd3093d05d 100644 --- a/lib/spack/spack/schema/env.py +++ b/lib/spack/spack/schema/env.py @@ -63,6 +63,7 @@ schema = { { 'include': { 'type': 'array', + 'default': [], 'items': { 'type': 'string' }, diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index c3fb911336..4e544ef757 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -1749,3 +1749,13 @@ def test_duplicate_packages_raise_when_concretizing_together(): with pytest.raises(ev.SpackEnvironmentError, match=r'cannot contain more'): e.concretize() + + +def test_env_write_only_non_default(): + print(env('create', 'test')) + + e = ev.read('test') + with open(e.manifest_path, 'r') as f: + yaml = f.read() + + assert yaml == ev.default_manifest_yaml -- cgit v1.2.3-70-g09d2 From edf9548310b59bf7c52d75f02b619520bba467ad Mon Sep 17 00:00:00 2001 From: Omar Padron Date: Sun, 3 Nov 2019 20:11:30 -0500 Subject: bugfix: fetch prefers to fetch local mirrors over remote resources (#13545) - [x] insert at beginning of list so fetch grabs local mirrors before remote resources - [x] update the S3FetchStrategy so that it throws a SpackError if the fetch fails. Before, it was throwing URLError, which was not being caught in stage.py. 
- [x] move error handling out of S3FetchStrategy and into web_util.read_from_url() - [x] pass string instead of URLError to SpackWebError --- lib/spack/spack/fetch_strategy.py | 4 ++-- lib/spack/spack/stage.py | 8 +++----- lib/spack/spack/util/web.py | 7 ++++++- 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index 393e3af9d1..5a57703d27 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -1126,7 +1126,7 @@ class S3FetchStrategy(URLFetchStrategy): parsed_url = url_util.parse(self.url) if parsed_url.scheme != 's3': - raise ValueError( + raise FetchError( 'S3FetchStrategy can only fetch from s3:// urls.') tty.msg("Fetching %s" % self.url) @@ -1392,7 +1392,7 @@ class NoCacheError(FetchError): class FailedDownloadError(FetchError): - """Raised wen a download fails.""" + """Raised when a download fails.""" def __init__(self, url, msg=""): super(FailedDownloadError, self).__init__( "Failed to fetch file from URL: %s" % url, msg) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 7869c5f863..d2dd3e6e7a 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -433,11 +433,9 @@ class Stage(object): # Add URL strategies for all the mirrors with the digest for url in urls: - fetchers.append(fs.from_url_scheme( - url, digest, expand=expand, extension=extension)) - # fetchers.insert( - # 0, fs.URLFetchStrategy( - # url, digest, expand=expand, extension=extension)) + fetchers.insert( + 0, fs.from_url_scheme( + url, digest, expand=expand, extension=extension)) if self.default_fetcher.cachable: for rel_path in reversed(list(self.mirror_paths)): diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index f2afe769c6..1fe58d6415 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -177,7 +177,12 @@ def read_from_url(url, accept_content_type=None): # Do the real GET request when we know it's just HTML. req.get_method = lambda: "GET" - response = _urlopen(req, timeout=_timeout, context=context) + + try: + response = _urlopen(req, timeout=_timeout, context=context) + except URLError as err: + raise SpackWebError('Download failed: {ERROR}'.format( + ERROR=str(err))) if accept_content_type and not is_web_url: content_type = response.headers.get('Content-type') -- cgit v1.2.3-70-g09d2 From 385e41d70b6562e19a549f9141697ba12b4e7b6d Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Mon, 4 Nov 2019 23:03:35 -0600 Subject: binary distribution: relocate text files properly in relative binaries (#13578) * Make relative binaries relocate text files properly * rb strings aren't valid in python 2 * move perl to new interface for setup_environment family methods --- lib/spack/spack/binary_distribution.py | 2 -- lib/spack/spack/relocate.py | 20 +++++++++------ var/spack/repos/builtin/packages/perl/package.py | 32 ++++++++++++++++++------ 3 files changed, 37 insertions(+), 17 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index eafa1dcdb1..2dcb75860b 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -520,8 +520,6 @@ def relocate_package(workdir, spec, allow_root): old_prefix = str(buildinfo.get('spackprefix', '/not/in/buildinfo/dictionary')) rel = buildinfo.get('relative_rpaths', False) - if rel: - return tty.msg("Relocating package from", "%s to %s." 
% (old_path, new_path)) diff --git a/lib/spack/spack/relocate.py b/lib/spack/spack/relocate.py index 8e4350a7b1..9a54be1c64 100644 --- a/lib/spack/spack/relocate.py +++ b/lib/spack/spack/relocate.py @@ -360,17 +360,21 @@ def replace_prefix_text(path_name, old_dir, new_dir): Replace old install prefix with new install prefix in text files using utf-8 encoded strings. """ - - def replace(match): - return match.group().replace(old_dir.encode('utf-8'), - new_dir.encode('utf-8')) with open(path_name, 'rb+') as f: data = f.read() f.seek(0) - pat = re.compile(old_dir.encode('utf-8')) - if not pat.search(data): - return - ndata = pat.sub(replace, data) + # Replace old_dir with new_dir if it appears at the beginning of a path + # Negative lookbehind for a character legal in a path + # Then a match group for any characters legal in a compiler flag + # Then old_dir + # Then characters legal in a path + # Ensures we only match the old_dir if it's precedeed by a flag or by + # characters not legal in a path, but not if it's preceeded by other + # components of a path. + old_bytes = old_dir.encode('utf-8') + pat = b'(? Date: Fri, 1 Nov 2019 12:00:16 -0500 Subject: environments: make shell modifications partially unconditional (#13523) * environments: make shell modifications partially unconditional * flake * missing module name * add regression test * flake --- lib/spack/spack/environment.py | 24 +++++++++++++++++++++++- lib/spack/spack/test/cmd/env.py | 14 ++++++++++++++ 2 files changed, 37 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index 2507d1a254..4ab01cb250 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -1055,8 +1055,24 @@ class Environment(object): '': ['CMAKE_PREFIX_PATH'] } + def unconditional_environment_modifications(self, view): + """List of environment (shell) modifications to be processed for view. + + This list does not depend on the specs in this environment""" + env = spack.util.environment.EnvironmentModifications() + + for subdir, vars in self.prefix_inspections.items(): + full_subdir = os.path.join(view.root, subdir) + for var in vars: + env.prepend_path(var, full_subdir) + + return env + def environment_modifications_for_spec(self, spec, view=None): - """List of environment modifications to be processed.""" + """List of environment (shell) modifications to be processed for spec. 
+ + This list is specific to the location of the spec or its projection in + the view.""" spec = spec.copy() if view: spec.prefix = Prefix(view.view().get_projection_for_spec(spec)) @@ -1089,6 +1105,9 @@ class Environment(object): # No default view to add to shell return env_mod.shell_modifications(shell) + env_mod.extend(self.unconditional_environment_modifications( + self.default_view)) + for _, spec in self.concretized_specs(): if spec in self.default_view: env_mod.extend(self.environment_modifications_for_spec( @@ -1107,6 +1126,9 @@ class Environment(object): # No default view to add to shell return env_mod.shell_modifications(shell) + env_mod.extend(self.unconditional_environment_modifications( + self.default_view).reversed()) + for _, spec in self.concretized_specs(): if spec in self.default_view: env_mod.extend( diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 4e544ef757..b1e9e579ae 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -1703,6 +1703,20 @@ def test_env_activate_csh_prints_shell_output( assert "alias despacktivate" in out +@pytest.mark.regression('12719') +def test_env_activate_default_view_root_unconditional(env_deactivate, + mutable_mock_env_path): + """Check that the root of the default view in the environment is added + to the shell unconditionally.""" + env('create', 'test', add_view=True) + + with ev.read('test') as e: + viewdir = e.default_view.root + + out = env('activate', '--sh', 'test') + assert 'PATH=%s' % os.path.join(viewdir, 'bin') in out + + def test_concretize_user_specs_together(): e = ev.create('coconcretization') e.concretization = 'together' -- cgit v1.2.3-70-g09d2 From ecfe84def70c8ccba32c2c3d7d3a25296e10dc59 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 5 Nov 2019 02:37:49 -0700 Subject: bugfix: uninstall should find concrete specs by DAG hash This fixes a regression introduced in #10792. `spack uninstall` in an environment would not match concrete query specs properly after the index hash of enviroments changed. - [x] Search by DAG hash for specs to remove instead of by build hash --- lib/spack/spack/environment.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index 4ab01cb250..f16599c215 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -833,8 +833,16 @@ class Environment(object): if not matches: # concrete specs match against concrete specs in the env + # by *dag hash*, not build hash. + dag_hashes_in_order = [ + self.specs_by_hash[build_hash].dag_hash() + for build_hash in self.concretized_order + ] + specs_hashes = zip( - self.concretized_user_specs, self.concretized_order) + self.concretized_user_specs, dag_hashes_in_order + ) + matches = [ s for s, h in specs_hashes if query_spec.dag_hash() == h -- cgit v1.2.3-70-g09d2 From b4501c458623ed12cfb9a93cb6ebdfc4604b3fe2 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 5 Nov 2019 01:26:03 -0700 Subject: `spack find` now displays variants and other spec constraints If you do this in a spack environment: spack add hdf5+hl hdf5+hl will be the root added to the `spack.yaml` file, and you should really expect `hdf5+hl` to display as a root in the environment. - [x] Add decoration to roots so that you can see the details about what is required to build. - [x] Add a test. 
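To illustrate the intent, here is a minimal sketch (an illustration only, not part of this patch; it assumes Spack's spec format tokens {name} and {variants}):

    import spack.spec

    # The root recorded in spack.yaml keeps the user-requested constraint...
    root = spack.spec.Spec('hdf5+hl')

    # ...so rendering it with a variant-aware format string yields
    # "hdf5+hl" rather than a bare "hdf5", which is what the decorated
    # root listing now shows.
    print(root.format('{name}{variants}'))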
--- lib/spack/spack/cmd/__init__.py | 2 +- lib/spack/spack/cmd/find.py | 14 +++++++++++--- lib/spack/spack/test/cmd/env.py | 11 +++++++++++ 3 files changed, 23 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index fe0ed0c224..0d2e92940b 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -331,7 +331,7 @@ def display_specs(specs, args=None, **kwargs): format_string = get_arg('format', None) if format_string is None: - nfmt = '{namespace}.{name}' if namespace else '{name}' + nfmt = '{fullname}' if namespace else '{name}' ffmt = '' if full_compiler or flags: ffmt += '{%compiler.name}' diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index ed8f2ed2bf..37cfcd30d5 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -165,10 +165,18 @@ def display_env(env, args, decorator): tty.msg('No root specs') else: tty.msg('Root specs') - # TODO: Change this to not print extraneous deps and variants + + # Roots are displayed with variants, etc. so that we can see + # specifically what the user asked for. cmd.display_specs( - env.user_specs, args, - decorator=lambda s, f: color.colorize('@*{%s}' % f)) + env.user_specs, + args, + decorator=lambda s, f: color.colorize('@*{%s}' % f), + namespace=True, + show_flags=True, + show_full_compiler=True, + variants=True + ) print() if args.show_concretized: diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index b1e9e579ae..9b3ca6e66e 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -615,6 +615,17 @@ def test_env_blocks_uninstall(mock_stage, mock_fetch, install_mockery): assert 'used by the following environments' in out +def test_roots_display_with_variants(): + env('create', 'test') + with ev.read('test'): + add('boost+shared') + + with ev.read('test'): + out = find(output=str) + + assert "boost +shared" in out + + def test_uninstall_removes_from_env(mock_stage, mock_fetch, install_mockery): env('create', 'test') with ev.read('test'): -- cgit v1.2.3-70-g09d2 From b280034380f82c1badbe320fe6763ef04620c8fe Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Wed, 13 Nov 2019 16:03:16 -0800 Subject: Allow binary relocation of strings in relative binaries (#13724) Binaries with relative RPATHS currently do not relocate strings hard-coded in binaries This PR extends the best-effort relocation of strings hard-coded in binaries to those whose RPATHs have been relativized. 
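For context, the best-effort string relocation that this change extends to relatively-RPATHed binaries can be sketched roughly as follows. This is an illustration only, not Spack's actual replace_prefix_bin; the function name and the exact padding strategy are assumptions.

    import re

    def replace_prefix_in_binary(path, old_prefix, new_prefix):
        """Rewrite old_prefix -> new_prefix inside a binary file, padding
        each rewritten null-terminated string with trailing null bytes so
        its length (and every later offset) is preserved."""
        old = old_prefix.encode('utf-8')
        new = new_prefix.encode('utf-8')
        if len(new) > len(old):
            # Mirrors the limitation noted in the patch: a longer
            # replacement cannot be padded into place.
            raise ValueError('new prefix is longer than old prefix')

        # Match a whole null-terminated string starting at the old prefix.
        pattern = re.compile(re.escape(old) + b'[^\0]*\0')

        def repl(match):
            rewritten = match.group().replace(old, new)
            pad = b'\0' * (len(match.group()) - len(rewritten))
            return rewritten + pad

        with open(path, 'rb+') as f:
            data = f.read()
            f.seek(0)
            f.write(pattern.sub(repl, data))
            f.truncate()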
--- lib/spack/spack/binary_distribution.py | 17 ++++++++++++++++- lib/spack/spack/relocate.py | 23 +++++++++-------------- 2 files changed, 25 insertions(+), 15 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 2dcb75860b..23e5126303 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -534,7 +534,22 @@ def relocate_package(workdir, spec, allow_root): newprefix=new_prefix) # If the binary files in the package were not edited to use # relative RPATHs, then the RPATHs need to be relocated - if not rel: + if rel: + if old_path != new_path: + files_to_relocate = list(filter( + lambda pathname: not relocate.file_is_relocatable( + pathname, paths_to_relocate=[old_path, old_prefix]), + map(lambda filename: os.path.join(workdir, filename), + buildinfo['relocate_binaries']))) + + if len(old_path) < len(new_path) and files_to_relocate: + tty.debug('Cannot do a binary string replacement with padding ' + 'for package because %s is longer than %s.' % + (new_path, old_path)) + else: + for path_name in files_to_relocate: + relocate.replace_prefix_bin(path_name, old_path, new_path) + else: path_names = set() for filename in buildinfo['relocate_binaries']: path_name = os.path.join(workdir, filename) diff --git a/lib/spack/spack/relocate.py b/lib/spack/spack/relocate.py index 9a54be1c64..472195866e 100644 --- a/lib/spack/spack/relocate.py +++ b/lib/spack/spack/relocate.py @@ -637,7 +637,8 @@ def is_relocatable(spec): return True -def file_is_relocatable(file): +def file_is_relocatable( + file, paths_to_relocate=[spack.store.layout.root, spack.paths.prefix]): """Returns True if the file passed as argument is relocatable. Args: @@ -684,19 +685,13 @@ def file_is_relocatable(file): if idpath is not None: set_of_strings.discard(idpath) - if any(spack.store.layout.root in x for x in set_of_strings): - # One binary has the root folder not in the RPATH, - # meaning that this spec is not relocatable - msg = 'Found "{0}" in {1} strings' - tty.debug(msg.format(spack.store.layout.root, file)) - return False - - if any(spack.paths.prefix in x for x in set_of_strings): - # One binary has the root folder not in the RPATH, - # meaning that this spec is not relocatable - msg = 'Found "{0}" in {1} strings' - tty.debug(msg.format(spack.paths.prefix, file)) - return False + for path_to_relocate in paths_to_relocate: + if any(path_to_relocate in x for x in set_of_strings): + # One binary has the root folder not in the RPATH, + # meaning that this spec is not relocatable + msg = 'Found "{0}" in {1} strings' + tty.debug(msg.format(path_to_relocate, file)) + return False return True -- cgit v1.2.3-70-g09d2 From 28163cb34fda3a42c14fbe7ce036291748bcc576 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 14 Nov 2019 12:15:47 -0800 Subject: determine target relative to the link directory rather than the full link path (which includes the file name) (#13727) --- lib/spack/spack/relocate.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/relocate.py b/lib/spack/spack/relocate.py index 472195866e..744338d7e5 100644 --- a/lib/spack/spack/relocate.py +++ b/lib/spack/spack/relocate.py @@ -488,11 +488,11 @@ def make_link_relative(cur_path_names, orig_path_names): Change absolute links to be relative. 
""" for cur_path, orig_path in zip(cur_path_names, orig_path_names): - old_src = os.readlink(orig_path) - new_src = os.path.relpath(old_src, orig_path) + target = os.readlink(orig_path) + relative_target = os.path.relpath(target, os.path.dirname(orig_path)) os.unlink(cur_path) - os.symlink(new_src, cur_path) + os.symlink(relative_target, cur_path) def make_macho_binaries_relative(cur_path_names, orig_path_names, old_dir, -- cgit v1.2.3-70-g09d2 From 74e04b7e2003cf6bce5471fe5f3e9663c008a97b Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Thu, 14 Nov 2019 16:22:19 -0800 Subject: Config option to allow gpg warning suppression (#13744) Add a configuration option to suppress gpg warnings during binary package verification. This only suppresses warnings: a gpg failure will still fail the install. This allows users who have already explicitly trusted the gpg key they are using to avoid seeing repeated warnings that it is self-signed. --- etc/spack/defaults/config.yaml | 8 ++++++++ lib/spack/spack/binary_distribution.py | 4 +++- lib/spack/spack/schema/config.py | 1 + lib/spack/spack/util/gpg.py | 7 +++++-- 4 files changed, 17 insertions(+), 3 deletions(-) diff --git a/etc/spack/defaults/config.yaml b/etc/spack/defaults/config.yaml index 6be1d9770b..3aadccfda1 100644 --- a/etc/spack/defaults/config.yaml +++ b/etc/spack/defaults/config.yaml @@ -80,6 +80,14 @@ config: verify_ssl: true + # Suppress gpg warnings from binary package verification + # Only suppresses warnings, gpg failure will still fail the install + # Potential rationale to set True: users have already explicitly trusted the + # gpg key they are using, and may not want to see repeated warnings that it + # is self-signed or something of the sort. + suppress_gpg_warnings: false + + # If set to true, Spack will attempt to build any compiler on the spec # that is not already available. 
If set to False, Spack will only use # compilers already configured in compilers.yaml diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 23e5126303..3b10cca180 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -21,6 +21,7 @@ import llnl.util.tty as tty from llnl.util.filesystem import mkdirp, install_tree import spack.cmd +import spack.config as config import spack.fetch_strategy as fs import spack.util.gpg as gpg_util import spack.relocate as relocate @@ -592,7 +593,8 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False, if not unsigned: if os.path.exists('%s.asc' % specfile_path): try: - Gpg.verify('%s.asc' % specfile_path, specfile_path) + suppress = config.get('config:suppress_gpg_warnings', False) + Gpg.verify('%s.asc' % specfile_path, specfile_path, suppress) except Exception as e: shutil.rmtree(tmpdir) tty.die(e) diff --git a/lib/spack/spack/schema/config.py b/lib/spack/spack/schema/config.py index 6eb127a359..7d170bbc91 100644 --- a/lib/spack/spack/schema/config.py +++ b/lib/spack/spack/schema/config.py @@ -56,6 +56,7 @@ properties = { 'source_cache': {'type': 'string'}, 'misc_cache': {'type': 'string'}, 'verify_ssl': {'type': 'boolean'}, + 'suppress_gpg_warnings': {'type': 'boolean'}, 'install_missing_compilers': {'type': 'boolean'}, 'debug': {'type': 'boolean'}, 'checksum': {'type': 'boolean'}, diff --git a/lib/spack/spack/util/gpg.py b/lib/spack/spack/util/gpg.py index a5c10d2151..a7d1a3d8fa 100644 --- a/lib/spack/spack/util/gpg.py +++ b/lib/spack/spack/util/gpg.py @@ -100,8 +100,11 @@ class Gpg(object): cls.gpg()(*args) @classmethod - def verify(cls, signature, file): - cls.gpg()('--verify', signature, file) + def verify(cls, signature, file, suppress_warnings=False): + if suppress_warnings: + cls.gpg()('--verify', signature, file, error=str) + else: + cls.gpg()('--verify', signature, file) @classmethod def list(cls, trusted, signing): -- cgit v1.2.3-70-g09d2 From 46bfcbbd3d0a3bec635d7bf80b6b3e9d0e4fae59 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 15 Nov 2019 21:38:30 +0100 Subject: hwloc: added 'master' version pointing at the HEAD of the master branch (#13734) --- var/spack/repos/builtin/packages/hwloc/package.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/hwloc/package.py b/var/spack/repos/builtin/packages/hwloc/package.py index 05e5d006cb..79132f9807 100644 --- a/var/spack/repos/builtin/packages/hwloc/package.py +++ b/var/spack/repos/builtin/packages/hwloc/package.py @@ -2,9 +2,6 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - - -from spack import * import sys @@ -28,7 +25,9 @@ class Hwloc(AutotoolsPackage): url = "https://download.open-mpi.org/release/hwloc/v2.0/hwloc-2.0.2.tar.gz" list_url = "http://www.open-mpi.org/software/hwloc/" list_depth = 2 + git = 'https://github.com/open-mpi/hwloc.git' + version('master', branch='master') version('2.0.2', sha256='27dcfe42e3fb3422b72ce48b48bf601c0a3e46e850ee72d9bdd17b5863b6e42c') version('2.0.1', sha256='f1156df22fc2365a31a3dc5f752c53aad49e34a5e22d75ed231cd97eaa437f9d') version('2.0.0', sha256='a0d425a0fc7c7e3f2c92a272ffaffbd913005556b4443e1887d2e1718d902887') @@ -59,7 +58,10 @@ class Hwloc(AutotoolsPackage): ) depends_on('pkgconfig', type='build') - + depends_on('m4', type='build', when='@master') + depends_on('autoconf', type='build', when='@master') + depends_on('automake', type='build', when='@master') + depends_on('libtool', type='build', when='@master') depends_on('cuda', when='+nvml') depends_on('cuda', when='+cuda') depends_on('gl', when='+gl') -- cgit v1.2.3-70-g09d2 From aa83c483f7dde76e590f53b69992e1afa84476db Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Fri, 15 Nov 2019 13:55:51 -0800 Subject: Bugfix/config caching 13754 (#13759) * remove reference to `spack.store` in method definition Referencing `spack.store` in method definition will cache the `spack.config.config` singleton variable too early, before we have a chance to add command line and environment scopes. --- lib/spack/spack/relocate.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/relocate.py b/lib/spack/spack/relocate.py index 744338d7e5..434d681035 100644 --- a/lib/spack/spack/relocate.py +++ b/lib/spack/spack/relocate.py @@ -637,8 +637,7 @@ def is_relocatable(spec): return True -def file_is_relocatable( - file, paths_to_relocate=[spack.store.layout.root, spack.paths.prefix]): +def file_is_relocatable(file, paths_to_relocate=None): """Returns True if the file passed as argument is relocatable. Args: @@ -651,6 +650,8 @@ def file_is_relocatable( ValueError: if the file does not exist or the path is not absolute """ + default_paths_to_relocate = [spack.store.layout.root, spack.paths.prefix] + paths_to_relocate = paths_to_relocate or default_paths_to_relocate if not (platform.system().lower() == 'darwin' or platform.system().lower() == 'linux'): -- cgit v1.2.3-70-g09d2 From eb22abc5211a2600f62d72ca9fb6f013cec9a3f6 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Sun, 17 Nov 2019 15:30:08 -0700 Subject: Document use of the maintainers field (#13748) --- lib/spack/docs/packaging_guide.rst | 15 +++++++++++++++ lib/spack/spack/cmd/create.py | 4 ++++ 2 files changed, 19 insertions(+) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 767a744baa..7fc9d09e2d 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -136,6 +136,10 @@ generates a boilerplate template for your package, and opens up the new homepage = "http://www.example.com" url = "https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2" + # FIXME: Add a list of GitHub accounts to + # notify when the package is updated. 
+ # maintainers = ['github_user1', 'github_user2'] + version('6.1.2', '8ddbb26dc3bd4e2302984debba1406a5') version('6.1.1', '4c175f86e11eb32d8bf9872ca3a8e11d') version('6.1.0', '86ee6e54ebfc4a90b643a65e402c4048') @@ -184,6 +188,17 @@ The rest of the tasks you need to do are as follows: The ``homepage`` is displayed when users run ``spack info`` so that they can learn more about your package. +#. Add a comma-separated list of maintainers. + + The ``maintainers`` field is a list of GitHub accounts of people + who want to be notified any time the package is modified. When a + pull request is submitted that updates the package, these people + will be requested to review the PR. This is useful for developers + who maintain a Spack package for their own software, as well as + users who rely on a piece of software and want to ensure that the + package doesn't break. It also gives users a list of people to + contact for help when someone reports a build error with the package. + #. Add ``depends_on()`` calls for the package's dependencies. ``depends_on`` tells Spack that other packages need to be built diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 6c68617acd..ee0fb3a347 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -60,6 +60,10 @@ class {class_name}({base_class_name}): homepage = "https://www.example.com" {url_def} + # FIXME: Add a list of GitHub accounts to + # notify when the package is updated. + # maintainers = ['github_user1', 'github_user2'] + {versions} {dependencies} -- cgit v1.2.3-70-g09d2 From 9c5b58350804b7179ae7f12fffb2a55fff626110 Mon Sep 17 00:00:00 2001 From: "Daryl W. Grunau" Date: Mon, 11 Nov 2019 16:47:47 -0700 Subject: verify.py: os.path.exists exception handling (#13656) --- lib/spack/spack/verify.py | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/lib/spack/spack/verify.py b/lib/spack/spack/verify.py index ec604a3240..88b89cf17c 100644 --- a/lib/spack/spack/verify.py +++ b/lib/spack/spack/verify.py @@ -28,24 +28,26 @@ def compute_hash(path): def create_manifest_entry(path): data = {} - stat = os.stat(path) - data['mode'] = stat.st_mode - data['owner'] = stat.st_uid - data['group'] = stat.st_gid + if os.path.exists(path): + stat = os.stat(path) - if os.path.islink(path): - data['type'] = 'link' - data['dest'] = os.readlink(path) + data['mode'] = stat.st_mode + data['owner'] = stat.st_uid + data['group'] = stat.st_gid - elif os.path.isdir(path): - data['type'] = 'dir' + if os.path.islink(path): + data['type'] = 'link' + data['dest'] = os.readlink(path) - else: - data['type'] = 'file' - data['hash'] = compute_hash(path) - data['time'] = stat.st_mtime - data['size'] = stat.st_size + elif os.path.isdir(path): + data['type'] = 'dir' + + else: + data['type'] = 'file' + data['hash'] = compute_hash(path) + data['time'] = stat.st_mtime + data['size'] = stat.st_size return data -- cgit v1.2.3-70-g09d2 From fba963f1d10a7d725af3db4d449451003371fd23 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Tue, 26 Nov 2019 18:09:24 -0700 Subject: use semicolons instead of newlines in module/python command (#13904) --- lib/spack/spack/util/module_cmd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/util/module_cmd.py b/lib/spack/spack/util/module_cmd.py index 75f61a6466..4ff6b0de43 100644 --- a/lib/spack/spack/util/module_cmd.py +++ b/lib/spack/spack/util/module_cmd.py @@ -17,7 +17,7 @@ import llnl.util.tty as tty # This list is not exhaustive. 
Currently we only use load and unload # If we need another option that changes the environment, add it here. module_change_commands = ['load', 'swap', 'unload', 'purge', 'use', 'unuse'] -py_cmd = "'import os\nimport json\nprint(json.dumps(dict(os.environ)))'" +py_cmd = "'import os;import json;print(json.dumps(dict(os.environ)))'" # This is just to enable testing. I hate it but we can't find a better way _test_mode = False -- cgit v1.2.3-70-g09d2 From b44df29316c52150702d35026e8da7b84fd8632b Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 11 Nov 2019 06:02:36 -0600 Subject: environments: don't try to modify run-env if a spec is not installed (#13589) Fixes #13529 Fixes #13509 --- lib/spack/spack/environment.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index f16599c215..55d513ee27 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -1117,7 +1117,7 @@ class Environment(object): self.default_view)) for _, spec in self.concretized_specs(): - if spec in self.default_view: + if spec in self.default_view and spec.package.installed: env_mod.extend(self.environment_modifications_for_spec( spec, self.default_view)) @@ -1138,7 +1138,7 @@ class Environment(object): self.default_view).reversed()) for _, spec in self.concretized_specs(): - if spec in self.default_view: + if spec in self.default_view and spec.package.installed: env_mod.extend( self.environment_modifications_for_spec( spec, self.default_view).reversed()) -- cgit v1.2.3-70-g09d2 From c36d9f297fafd1f9a1fd3d309a46eb3ee8987f44 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 7 Nov 2019 17:13:51 -0700 Subject: bugfix: mirror path works for unknown versions (#13626) `mirror_archive_path` was failing to account for the case where the fetched version isn't known to Spack. - [x] don't require the fetched version to be in `Package.versions` - [x] add regression test for mirror paths when package does not have a version --- lib/spack/spack/mirror.py | 4 ++-- lib/spack/spack/test/mirror.py | 6 ++++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index 0c5e2a9cc2..f7e8e73ea9 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -303,8 +303,8 @@ def mirror_archive_paths(fetcher, per_package_ref, spec=None): storage path of the resource associated with the specified ``fetcher``.""" ext = None if spec: - ext = spec.package.versions[spec.package.version].get( - 'extension', None) + versions = spec.package.versions.get(spec.package.version, {}) + ext = versions.get('extension', None) # If the spec does not explicitly specify an extension (the default case), # then try to determine it automatically. An extension can only be # specified for the primary source of the package (e.g. 
the source code diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index da40cfac37..9068db7193 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -146,6 +146,12 @@ def test_all_mirror( repos.clear() +def test_mirror_archive_paths_no_version(mock_packages, config, mock_archive): + spec = Spec('trivial-install-test-package@nonexistingversion') + fetcher = spack.fetch_strategy.URLFetchStrategy(mock_archive.url) + spack.mirror.mirror_archive_paths(fetcher, 'per-package-ref', spec) + + def test_mirror_with_url_patches(mock_packages, config, monkeypatch): spec = Spec('patch-several-dependencies') spec.concretize() -- cgit v1.2.3-70-g09d2 From a93a6136682debfd58d295e3035ad2c6d142bda1 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 2 Dec 2019 23:05:02 +0100 Subject: Speedup environment activation (#13557) * Add a transaction around repeated calls to `spec.prefix` in the activation process * cache the computation of home in the python package to speed up setting deps * ensure that module-scope variables are only set *once* per module --- lib/spack/spack/build_environment.py | 9 +++++++++ lib/spack/spack/environment.py | 6 ++++-- var/spack/repos/builtin/packages/python/package.py | 10 ++++++++-- 3 files changed, 21 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 84fc58587e..13ee99d177 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -422,6 +422,11 @@ def set_build_environment_variables(pkg, env, dirty): def _set_variables_for_single_module(pkg, module): """Helper function to set module variables for single module.""" + # Put a marker on this module so that it won't execute the body of this + # function again, since it is not needed + marker = '_set_run_already_called' + if getattr(module, marker, False): + return jobs = spack.config.get('config:build_jobs') if pkg.parallel else 1 jobs = min(jobs, multiprocessing.cpu_count()) @@ -489,6 +494,10 @@ def _set_variables_for_single_module(pkg, module): m.static_to_shared_library = static_to_shared_library + # Put a marker on this module so that it won't execute the body of this + # function again, since it is not needed + setattr(m, marker, True) + def set_module_variables_for_package(pkg): """Populate the module scope of install() with some useful functions. 
diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index 55d513ee27..2c7a0cf098 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -159,7 +159,8 @@ def activate( cmds += 'export PS1="%s ${PS1}";\n' % prompt if add_view and default_view_name in env.views: - cmds += env.add_default_view_to_shell(shell) + with spack.store.db.read_transaction(): + cmds += env.add_default_view_to_shell(shell) return cmds @@ -207,7 +208,8 @@ def deactivate(shell='sh'): cmds += 'fi;\n' if default_view_name in _active_environment.views: - cmds += _active_environment.rm_default_view_from_shell(shell) + with spack.store.db.read_transaction(): + cmds += _active_environment.rm_default_view_from_shell(shell) tty.debug("Deactivated environmennt '%s'" % _active_environment.name) _active_environment = None diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index 15901872e9..a92d1c6c79 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -163,6 +163,9 @@ class Python(AutotoolsPackage): _DISTUTIL_CACHE_FILENAME = 'sysconfig.json' _distutil_vars = None + # Used to cache home locations, since computing them might be expensive + _homes = {} + # An in-source build with --enable-optimizations fails for python@3.X build_directory = 'spack-build' @@ -622,8 +625,11 @@ class Python(AutotoolsPackage): ``packages.yaml`` unknowingly. Query the python executable to determine exactly where it is installed.""" - prefix = self.get_config_var('prefix') - return Prefix(prefix) + dag_hash = self.spec.dag_hash() + if dag_hash not in self._homes: + prefix = self.get_config_var('prefix') + self._homes[dag_hash] = Prefix(prefix) + return self._homes[dag_hash] @property def libs(self): -- cgit v1.2.3-70-g09d2 From 03a5771b9d38ba3e01385a7548120b3a71e604eb Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Wed, 4 Dec 2019 20:17:40 -0700 Subject: Bugfix: allow missing modules if they are blacklisted (#13540) `spack module loads` and `spack module find` previously failed if any upstream modules were missing. This prevented it from being used with upstreams (or, really, any spack instance) that blacklisted modules. This PR makes module finding is now more lenient (especially for blacklisted modules). 
- `spack module find` now does not report an error if the spec is blacklisted - instead, it prints a single warning if any modules will be omitted from the loads file - It comments the missing modules out of the loads file so the user can see what's missing - Debug messages are also printed so users can check this with `spack -d...` - also added tests for new functionality --- lib/spack/spack/cmd/modules/__init__.py | 54 +++++++++++++++++++++++++-------- lib/spack/spack/modules/common.py | 50 ++++++++++++++++++++++++------ lib/spack/spack/test/cmd/module.py | 29 ++++++++++++++++++ lib/spack/spack/test/modules/common.py | 12 +++----- 4 files changed, 115 insertions(+), 30 deletions(-) diff --git a/lib/spack/spack/cmd/modules/__init__.py b/lib/spack/spack/cmd/modules/__init__.py index 4f1e640f6c..fbf93e9b2b 100644 --- a/lib/spack/spack/cmd/modules/__init__.py +++ b/lib/spack/spack/cmd/modules/__init__.py @@ -111,6 +111,14 @@ def one_spec_or_raise(specs): return specs[0] +_missing_modules_warning = ( + "Modules have been omitted for one or more specs, either" + " because they were blacklisted or because the spec is" + " associated with a package that is installed upstream and" + " that installation has not generated a module file. Rerun" + " this command with debug output enabled for more details.") + + def loads(module_type, specs, args, out=sys.stdout): """Prompt the list of modules associated with a list of specs""" @@ -131,7 +139,9 @@ def loads(module_type, specs, args, out=sys.stdout): ) modules = list( - (spec, spack.modules.common.get_module(module_type, spec, False)) + (spec, + spack.modules.common.get_module( + module_type, spec, get_full_path=False, required=False)) for spec in specs) module_commands = { @@ -145,15 +155,24 @@ def loads(module_type, specs, args, out=sys.stdout): } exclude_set = set(args.exclude) - prompt_template = '{comment}{exclude}{command}{prefix}{name}' + load_template = '{comment}{exclude}{command}{prefix}{name}' for spec, mod in modules: - d['exclude'] = '## ' if spec.name in exclude_set else '' - d['comment'] = '' if not args.shell else '# {0}\n'.format( - spec.format()) - d['name'] = mod - out.write(prompt_template.format(**d)) + if not mod: + module_output_for_spec = ( + '## blacklisted or missing from upstream: {0}'.format( + spec.format())) + else: + d['exclude'] = '## ' if spec.name in exclude_set else '' + d['comment'] = '' if not args.shell else '# {0}\n'.format( + spec.format()) + d['name'] = mod + module_output_for_spec = load_template.format(**d) + out.write(module_output_for_spec) out.write('\n') + if not all(mod for _, mod in modules): + tty.warn(_missing_modules_warning) + def find(module_type, specs, args): """Retrieve paths or use names of module files""" @@ -161,18 +180,27 @@ def find(module_type, specs, args): single_spec = one_spec_or_raise(specs) if args.recurse_dependencies: - specs_to_retrieve = list( - single_spec.traverse(order='post', cover='nodes', + dependency_specs_to_retrieve = list( + single_spec.traverse(root=False, order='post', cover='nodes', deptype=('link', 'run'))) else: - specs_to_retrieve = [single_spec] + dependency_specs_to_retrieve = [] try: - modules = [spack.modules.common.get_module(module_type, spec, - args.full_path) - for spec in specs_to_retrieve] + modules = [ + spack.modules.common.get_module( + module_type, spec, args.full_path, required=False) + for spec in dependency_specs_to_retrieve] + + modules.append( + spack.modules.common.get_module( + module_type, single_spec, args.full_path, required=True)) except 
spack.modules.common.ModuleNotFoundError as e: tty.die(e.message) + + if not all(modules): + tty.warn(_missing_modules_warning) + modules = list(x for x in modules if x) print(' '.join(modules)) diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py index 434d09c125..2e0090a449 100644 --- a/lib/spack/spack/modules/common.py +++ b/lib/spack/spack/modules/common.py @@ -312,20 +312,45 @@ class UpstreamModuleIndex(object): module_index = self.module_indices[db_index] module_type_index = module_index.get(module_type, {}) if not module_type_index: - raise ModuleNotFoundError( + tty.debug( "No {0} modules associated with the Spack instance where" " {1} is installed".format(module_type, spec)) + return None if spec.dag_hash() in module_type_index: return module_type_index[spec.dag_hash()] else: - raise ModuleNotFoundError( + tty.debug( "No module is available for upstream package {0}".format(spec)) + return None -def get_module(module_type, spec, get_full_path): +def get_module(module_type, spec, get_full_path, required=True): + """Retrieve the module file for a given spec and module type. + + Retrieve the module file for the given spec if it is available. If the + module is not available, this will raise an exception unless the module + is blacklisted or if the spec is installed upstream. + + Args: + module_type: the type of module we want to retrieve (e.g. lmod) + spec: refers to the installed package that we want to retrieve a module + for + required: if the module is required but blacklisted, this function will + print a debug message. If a module is missing but not blacklisted, + then an exception is raised (regardless of whether it is required) + get_full_path: if ``True``, this returns the full path to the module. + Otherwise, this returns the module name. + + Returns: + The module name or path. May return ``None`` if the module is not + available. 
+ """ if spec.package.installed_upstream: - module = spack.modules.common.upstream_module_index.upstream_module( - spec, module_type) + module = (spack.modules.common.upstream_module_index + .upstream_module(spec, module_type)) + if not module: + return None + if get_full_path: return module.path else: @@ -333,10 +358,17 @@ def get_module(module_type, spec, get_full_path): else: writer = spack.modules.module_types[module_type](spec) if not os.path.isfile(writer.layout.filename): - err_msg = "No module available for package {0} at {1}".format( - spec, writer.layout.filename - ) - raise ModuleNotFoundError(err_msg) + if not writer.conf.blacklisted: + err_msg = "No module available for package {0} at {1}".format( + spec, writer.layout.filename + ) + raise ModuleNotFoundError(err_msg) + elif required: + tty.debug("The module configuration has blacklisted {0}: " + "omitting it".format(spec)) + else: + return None + if get_full_path: return writer.layout.filename else: diff --git a/lib/spack/spack/test/cmd/module.py b/lib/spack/spack/test/cmd/module.py index ac8b48a9e8..6f2ffcbf85 100644 --- a/lib/spack/spack/test/cmd/module.py +++ b/lib/spack/spack/test/cmd/module.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os.path +import re import pytest @@ -144,6 +145,34 @@ def test_find_recursive(): assert len(out.split()) > 1 +@pytest.mark.db +def test_find_recursive_blacklisted(database, module_configuration): + module_configuration('blacklist') + + module('lmod', 'refresh', '-y', '--delete-tree') + module('lmod', 'find', '-r', 'mpileaks ^mpich') + + +@pytest.mark.db +def test_loads_recursive_blacklisted(database, module_configuration): + module_configuration('blacklist') + + module('lmod', 'refresh', '-y', '--delete-tree') + output = module('lmod', 'loads', '-r', 'mpileaks ^mpich') + lines = output.split('\n') + + assert any(re.match(r'[^#]*module load.*mpileaks', l) for l in lines) + assert not any(re.match(r'[^#]module load.*callpath', l) for l in lines) + assert any(re.match(r'## blacklisted or missing.*callpath', l) + for l in lines) + + # TODO: currently there is no way to separate stdout and stderr when + # invoking a SpackCommand. Supporting this requires refactoring + # SpackCommand, or log_output, or both. 
+ # start_of_warning = spack.cmd.modules._missing_modules_warning[:10] + # assert start_of_warning not in output + + # Needed to make the 'module_configuration' fixture below work writer_cls = spack.modules.lmod.LmodModulefileWriter diff --git a/lib/spack/spack/test/modules/common.py b/lib/spack/spack/test/modules/common.py index 604c19cca8..fdaf898daf 100644 --- a/lib/spack/spack/test/modules/common.py +++ b/lib/spack/spack/test/modules/common.py @@ -10,8 +10,7 @@ import collections import spack.spec import spack.modules.tcl -from spack.modules.common import ( - UpstreamModuleIndex, ModuleNotFoundError) +from spack.modules.common import UpstreamModuleIndex import spack.error @@ -133,18 +132,15 @@ module_index: assert m1.path == '/path/to/a' # No modules are defined for the DB associated with s2 - with pytest.raises(ModuleNotFoundError): - upstream_index.upstream_module(s2, 'tcl') + assert not upstream_index.upstream_module(s2, 'tcl') # Modules are defined for the index associated with s1, but none are # defined for the requested type - with pytest.raises(ModuleNotFoundError): - upstream_index.upstream_module(s1, 'lmod') + assert not upstream_index.upstream_module(s1, 'lmod') # A module is registered with a DB and the associated module index has # modules of the specified type defined, but not for the requested spec - with pytest.raises(ModuleNotFoundError): - upstream_index.upstream_module(s3, 'tcl') + assert not upstream_index.upstream_module(s3, 'tcl') # The spec isn't recorded as installed in any of the DBs with pytest.raises(spack.error.SpackError): -- cgit v1.2.3-70-g09d2 From 46b68263d87545771c403fba84e085bb1d422de6 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 4 Dec 2019 21:20:09 -0800 Subject: version bump: 0.13.2 --- lib/spack/spack/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index 37af6630cf..f7caf373e3 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -5,7 +5,7 @@ #: major, minor, patch version for Spack, in a tuple -spack_version_info = (0, 13, 1) +spack_version_info = (0, 13, 2) #: String containing Spack version joined with .'s spack_version = '.'.join(str(v) for v in spack_version_info) -- cgit v1.2.3-70-g09d2
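For completeness, a small sketch (not part of the patch) of how the bumped version info is typically consumed by code that gates on a minimum Spack version:

    import spack

    # spack_version_info is the (major, minor, patch) tuple bumped above;
    # spack_version is the same value joined with dots.
    assert spack.spack_version_info >= (0, 13, 2)
    print(spack.spack_version)  # e.g. '0.13.2'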