From a3714b32921896a0da0fde7597c089c0473897f3 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Wed, 14 Apr 2021 23:26:02 -0700 Subject: updates for new tutorial update s3 bucket update tutorial branch --- lib/spack/spack/cmd/tutorial.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/cmd/tutorial.py b/lib/spack/spack/cmd/tutorial.py index 2a32242c2d..6c0aa40a4a 100644 --- a/lib/spack/spack/cmd/tutorial.py +++ b/lib/spack/spack/cmd/tutorial.py @@ -25,8 +25,8 @@ level = "long" # tutorial configuration parameters -tutorial_branch = "releases/v0.15" -tutorial_mirror = "s3://spack-tutorial-container/mirror/" +tutorial_branch = "releases/v0.16" +tutorial_mirror = "s3://spack-binaries-prs/tutorial/ecp21/mirror" tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub") # configs to remove -- cgit v1.2.3-60-g2f50 From 393a105c067f4358b3e3e9663f920ee430b627c4 Mon Sep 17 00:00:00 2001 From: Gregory Becker Date: Wed, 14 Apr 2021 23:23:38 -0700 Subject: update tutorial public key --- share/spack/keys/tutorial.pub | 61 ++++++++++++++++++------------------------- 1 file changed, 26 insertions(+), 35 deletions(-) diff --git a/share/spack/keys/tutorial.pub b/share/spack/keys/tutorial.pub index 4add41cf36..57bcef38da 100644 --- a/share/spack/keys/tutorial.pub +++ b/share/spack/keys/tutorial.pub @@ -1,38 +1,29 @@ -----BEGIN PGP PUBLIC KEY BLOCK----- -mQENBF1IgqcBCADqSIBM0TT4+6Acv6SUpQ2l1Ql+UVRtJ74VGFOw+8I8aBWcBryB -wNsS/Drxn9M9rX8il2aGtAmwc1dhTh0JvdZO7KqG8Q4vvWOytdLnGSE61LV4147q -S/dJiYH2DCvhMKpOByIsEiuoTrUHzd1EQBnEPSwAQV8oWPrc1++f3iYmRemsOBCT -BldAu7Y5RwjI3qQ6GazoCF5rd1uyiMYrpT4amEKFE91VRe+IG8XfEaSTapOc/hO3 -Sw4fzPelA2qD12I+JMj56vM0fQy3TXD5qngIb+leb2jGI+0bTz8RGS0xSMYVvftA -upzQPaQIfzijVBt3tFSayx/NXKR0p+EuCqGBABEBAAG0MFNwYWNrIEJ1aWxkIFBp -cGVsaW5lIChEZW1vIEtleSkgPGtleUBzcGFjay5kZW1vPokBTgQTAQgAOBYhBDHI -4nh6FErErdiO0pX4aBGV4jnYBQJdSIKnAhsvBQsJCAcCBhUKCQgLAgQWAgMBAh4B -AheAAAoJEJX4aBGV4jnYpf0IAJDYEjpm0h1pNswTvmnEhgNVbojCGRfAts7F5uf8 -IFXGafKQsekMWZh0Ig0YXVn72jsOuNK/+keErMfXM3DFNTq0Ki7mcFedR9r5EfLf -4YW2n6mphsfMgsg8NwKVLFYWyhQQ4OzhdydPxkGVhEebHwfHNQ3aIcqbFmzkhxnX -CIYh2Flf3T306tKX4lXbhsXKG1L/bLtDiFRaMCBp66HGZ8u9Dbyy/W8aDwyx4duD -MG+y2OrhOf+zEu3ZPFyc/jsjmfnUtIfQVyRajh/8vh+i9fkvFlLaOQittNElt3z1 -8+ybGjE9qWY/mvR2ZqnP8SVkGvxSpBVfVXiFFdepvuPAcLu5AQ0EXUiCpwEIAJ2s -npNBAVocDUSdOF/Z/eCRvy3epuYm5f1Ge1ao9K2qWYno2FatnsYxK4qqB5yGRkfj -sEzAGP8JtJvqDSuB5Xk7CIjRNOwoSB3hqvmxWh2h+HsITUhMl11FZ0Cllz+etXcK -APz2ZHSKnA3R8uf4JzIr1cHLS+gDBoj8NgBCZhcyva2b5UC///FLm1+/Lpvekd0U -n7B524hbXhFUG+UMfHO/U1c4TvCMt7RGMoWUtRzfO6XB1VQCwWJBVcVGl8Yy59Zk -3K76VbFWQWOq6fRBE0xHBAga7pOgCc9qrb+FGl1IHUT8aV8CzkxckHlNb3PlntmE -lXZLPcGFWaPtGtuIJVsAEQEAAYkCbAQYAQgAIBYhBDHI4nh6FErErdiO0pX4aBGV -4jnYBQJdSIKnAhsuAUAJEJX4aBGV4jnYwHQgBBkBCAAdFiEEneR3pKqi9Rnivv07 -CYCNVr37XP0FAl1IgqcACgkQCYCNVr37XP13RQf/Ttxidgo9upF8jxrWnT5YhM6D -ozzGWzqE+/KDBX+o4f33o6uzozjESRXQUKdclC9ftDJQ84lFTMs3Z+/12ZDqCV2k -2qf0VfXg4e5xMq4tt6hojXUeYSfeGZXNU9LzjURCcMD+amIKjVztFg4kl3KHW3Pi -/aPTr4xWWgy2tZ1FDEuA5J6AZiKKJSVeoSPOGANouPqm4fNj273XFXQepIhQ5wve -4No0abxfXcLt5Yp3y06rNCBC9QdC++19N5+ajn2z9Qd2ZwztPb0mNuqHAok4vrlE -1c4WBWk93Nfy9fKImalGENpPDz0td2H9pNC9IafOWltGSWSINRrU1GeaNXS/uAOT -CADjcDN+emLbDTTReW4FLoQ0mPJ0tACgszGW50PtncTMPSj4uxSktQPWWk41oD9q -gpXm1Vgto4GvPWYs/ewR6Kyd8K0YkBxbRFyYOmycu3/zzYJnry+EHdvtQspwUDPg -QlI/avDrncERzICsbd86Jz0CMY4kzpg5v9dt/N6WnHlSk/S+vv4pPUDSz26Q4Ehh -iDvDavLGyzKSlVzWQ4bzzlQxXbDL6TZyVAQ4DBI4sI+WGtLbfD51EI5G9BfmDsbw -XJ0Dt2yEwRfDUx/lYbAMvhUnWEu2DSpYdJb8GG0GKTGqU4YpvO1JgTCsLSLIAHfT -tQMw04Gs+kORRNbggsdTD4sR 
-=N5Wp +mQINBFoGPeUBEACtR/y+HqInBMMmhoCIoaltG3PnOpRtuLIaiUWRgVCJ3ZbkSNDK +n2SWgnzQOuh4TeHgr9YEOiPdN8DYNI0Cp+IY2v73cxdMEHPHtVAGJn6fzmEnmFal +vTSUAgekYfrtJKTtURJ6l+Z1jaX1jqpKY+FTQvdyp08oJiEeDhL57Tx5s0QChvMK +A+2Hdkiz/FeSVDakQaEil4CBB7oC+MQsUphQUUEVGaLd91iHMsPF78J2SXMJ2E/i +xz+jcSLnyacaRyQxUBGMY0e4AFNq53GE6aC0bqso5q8JnZH9PP5IiH6XqeRiWCYb +GqvhW322pDrC83aHHz7dzCafBvcdh0/TlN7L5Qr+zRGVU5X/EdEeSruO0OylzXJp +RxPc1WW5uY0YMayPXtiESLs9ElczF34aXX5mLokvgcuroIJQY+oUoU4p4r+23gJw +HsQCCr9vUt1j7SOHhiYhFlMHpTvpaXyPAq6kah/iGl9okA4V6QPI5upPrwLdVWMZ +LQEYfGAuCfwZub0Mz2qZnvjiew0dCDImMLV8GOdJDN1Ui74OxesHHakdyqbOlSt7 +Z4XatINbnEM5VE1VBUlseOAHu/YTmsqnBu4w7//Ys5I4mFmKvVWgURRjtArE+Z05 +Voa0pD2wDb+5R7DkDqMLECJuxjD+X33vNHMOrA9qcef1WZ3CLtQHqs+abQARAQAB +tDdzYy10dXRvcmlhbCAoR1BHIGNyZWF0ZWQgZm9yIFNwYWNrKSA8YmVja2VyMzNA +bGxubC5nb3Y+iQI3BBMBCAAhBQJaBj3lAhsDBQsJCAcCBhUICQoLAgQWAgMBAh4B +AheAAAoJEJz6SkU7fGmyAr0P/159TwSgTJx0op7B+qhQ1UGac1yktFRyBaQmzkrl +sVH7HxB2Rsq0ty6uV0PA6JlRRb4efEKMsNLTIx6azNYgUgWY+gf/XXdde2romaHB +ZK1uFIp9A108HlqCp+Phwtbdp3NmYiTcpH9ZqpWUmz9dWw37zJkqbqi5xLmbqToM ++hQ49WsjGTE4ngLwwUm8Bxz+nFcyJHjZ1u4KLxTqMNoNjpWbDhM5mDuCYvf7Arwa +IE7Kk3jaYbrsCOw7Qz0oGAp41kKY3WedFfFhqfUNUR+p0o9N79tfkeQjxWK+Ds0W +H24d8HUT8n8lHlXo/0DfR9h9VHpnDo9VfXJyVIqr0uzU9FGHzJ/S8WddzgvxnOBl +Ia4fZTOk+KeQsKJ+Q/X6577F6/S0bQ48D6S09/6HAac6tyrnB3RRPETvGNPOHnA0 ++P8WuhKjojqBO0N8VsTweCCjWDYlF7MEGsHvWiyYYq9gdGY3crWPv8NjMyj+wYhK +NgF7IHIIgZszRyw2hRlwOdC8w+06AhpDivhE4n6wQlO7ScydQpZm44JJtqGO+cjo +6akvjsFe5hV1GmeCNp46GYDe2zQ1O2/50prYcNaRfJ9PsfaNmyTGg4NVqBbPNkwR +uXGiAS2qgeqfS3wORXexHCwYFB2Lcu0qDv7MJosM0cI1saY/eff3+Uw/AkqV51QY +0ajT +=Fc8M -----END PGP PUBLIC KEY BLOCK----- - -- cgit v1.2.3-60-g2f50 From 8c05387ebcda7a556c24896d23795bd534f4f8b4 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Mon, 22 Feb 2021 14:00:59 -0800 Subject: respect -k/verify-ssl-false in _existing_url method (#21864) --- lib/spack/spack/fetch_strategy.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index f075b893ae..1b94206fbf 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -324,6 +324,8 @@ class URLFetchStrategy(FetchStrategy): # Telling curl to fetch the first byte (-r 0-0) is supposed to be # portable. 
curl_args = ['--stderr', '-', '-s', '-f', '-r', '0-0', url] + if not spack.config.get('config:verify_ssl'): + curl_args.append('-k') _ = curl(*curl_args, fail_on_error=False, output=os.devnull) return curl.returncode == 0 -- cgit v1.2.3-60-g2f50 From 566becbbfe56080498abd87216cb03eaea271a31 Mon Sep 17 00:00:00 2001 From: Phil Tooley <32297355+ptooley@users.noreply.github.com> Date: Tue, 23 Feb 2021 23:20:49 +0000 Subject: use package supplied autogen.sh (#20319) --- var/spack/repos/builtin/packages/numactl/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/numactl/package.py b/var/spack/repos/builtin/packages/numactl/package.py index 8fbfbdfeea..5070a9bfca 100644 --- a/var/spack/repos/builtin/packages/numactl/package.py +++ b/var/spack/repos/builtin/packages/numactl/package.py @@ -12,6 +12,8 @@ class Numactl(AutotoolsPackage): homepage = "http://oss.sgi.com/projects/libnuma/" url = "https://github.com/numactl/numactl/archive/v2.0.11.tar.gz" + force_autoreconf = True + version('2.0.14', sha256='1ee27abd07ff6ba140aaf9bc6379b37825e54496e01d6f7343330cf1a4487035') version('2.0.12', sha256='7c3e819c2bdeb883de68bafe88776a01356f7ef565e75ba866c4b49a087c6bdf') version('2.0.11', sha256='3e099a59b2c527bcdbddd34e1952ca87462d2cef4c93da9b0bc03f02903f7089') @@ -25,6 +27,10 @@ class Numactl(AutotoolsPackage): depends_on('libtool', type='build') depends_on('m4', type='build') + def autoreconf(self, spec, prefix): + bash = which('bash') + bash('./autogen.sh') + def patch(self): # Remove flags not recognized by the NVIDIA compiler if self.spec.satisfies('%nvhpc'): -- cgit v1.2.3-60-g2f50 From a59fcd60f5a47b8a7e5ffa561a1211dc7a0eb6f7 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 1 Feb 2021 11:30:25 -0600 Subject: Python 3.10 support: collections.abc (#20441) (cherry picked from commit 40a40e0265d6704a7836aeb30a776d66da8f7fe6) --- lib/spack/external/_pytest/assertion/util.py | 7 +++++-- lib/spack/external/_pytest/main.py | 7 +++++-- lib/spack/external/_pytest/python_api.py | 5 ++++- lib/spack/external/jinja2/runtime.py | 8 ++++++-- lib/spack/external/jinja2/sandbox.py | 12 ++++++++++-- lib/spack/external/jinja2/tests.py | 7 ++++++- lib/spack/external/jinja2/utils.py | 8 ++++++-- lib/spack/external/markupsafe/__init__.py | 7 ++++++- lib/spack/external/ruamel/yaml/comments.py | 7 ++++++- lib/spack/external/ruamel/yaml/compat.py | 7 +++++-- lib/spack/external/ruamel/yaml/constructor.py | 19 ++++++++++++------- lib/spack/llnl/util/filesystem.py | 16 +++++++++++----- lib/spack/llnl/util/lang.py | 11 ++++++++--- lib/spack/spack/directives.py | 14 +++++++++++--- lib/spack/spack/solver/asp.py | 8 +++++++- lib/spack/spack/spec.py | 9 ++++++++- lib/spack/spack/test/spec_yaml.py | 9 +++++++-- lib/spack/spack/util/pattern.py | 10 ++++++++-- lib/spack/spack/util/spack_yaml.py | 13 ++++++++++--- 19 files changed, 141 insertions(+), 43 deletions(-) diff --git a/lib/spack/external/_pytest/assertion/util.py b/lib/spack/external/_pytest/assertion/util.py index 9f00929073..c09eff06b0 100644 --- a/lib/spack/external/_pytest/assertion/util.py +++ b/lib/spack/external/_pytest/assertion/util.py @@ -5,9 +5,12 @@ import pprint import _pytest._code import py try: - from collections import Sequence + from collections.abc import Sequence except ImportError: - Sequence = list + try: + from collections import Sequence + except ImportError: + Sequence = list u = py.builtin._totext diff --git a/lib/spack/external/_pytest/main.py b/lib/spack/external/_pytest/main.py index 
eacae8dab1..98aa28eb34 100644 --- a/lib/spack/external/_pytest/main.py +++ b/lib/spack/external/_pytest/main.py @@ -10,9 +10,12 @@ from _pytest import nodes import _pytest._code import py try: - from collections import MutableMapping as MappingMixin + from collections.abc import MutableMapping as MappingMixin except ImportError: - from UserDict import DictMixin as MappingMixin + try: + from collections import MutableMapping as MappingMixin + except ImportError: + from UserDict import DictMixin as MappingMixin from _pytest.config import directory_arg, UsageError, hookimpl from _pytest.outcomes import exit diff --git a/lib/spack/external/_pytest/python_api.py b/lib/spack/external/_pytest/python_api.py index cfc01193b0..a931b4d2c7 100644 --- a/lib/spack/external/_pytest/python_api.py +++ b/lib/spack/external/_pytest/python_api.py @@ -398,7 +398,10 @@ def approx(expected, rel=None, abs=None, nan_ok=False): __ https://docs.python.org/3/reference/datamodel.html#object.__ge__ """ - from collections import Mapping, Sequence + if sys.version_info >= (3, 3): + from collections.abc import Mapping, Sequence + else: + from collections import Mapping, Sequence from _pytest.compat import STRING_TYPES as String # Delegate the comparison to a class that knows how to deal with the type diff --git a/lib/spack/external/jinja2/runtime.py b/lib/spack/external/jinja2/runtime.py index f9d7a6806c..52dfeaebd6 100644 --- a/lib/spack/external/jinja2/runtime.py +++ b/lib/spack/external/jinja2/runtime.py @@ -315,10 +315,14 @@ class Context(with_metaclass(ContextMeta)): # register the context as mapping if possible try: - from collections import Mapping + from collections.abc import Mapping Mapping.register(Context) except ImportError: - pass + try: + from collections import Mapping + Mapping.register(Context) + except ImportError: + pass class BlockReference(object): diff --git a/lib/spack/external/jinja2/sandbox.py b/lib/spack/external/jinja2/sandbox.py index 93fb9d45f3..b9e5ec495a 100644 --- a/lib/spack/external/jinja2/sandbox.py +++ b/lib/spack/external/jinja2/sandbox.py @@ -14,7 +14,7 @@ """ import types import operator -from collections import Mapping +import sys from jinja2.environment import Environment from jinja2.exceptions import SecurityError from jinja2._compat import string_types, PY2 @@ -23,6 +23,11 @@ from jinja2.utils import Markup from markupsafe import EscapeFormatter from string import Formatter +if sys.version_info >= (3, 3): + from collections.abc import Mapping +else: + from collections import Mapping + #: maximum number of items a range may produce MAX_RANGE = 100000 @@ -79,7 +84,10 @@ except ImportError: pass #: register Python 2.6 abstract base classes -from collections import MutableSet, MutableMapping, MutableSequence +if sys.version_info >= (3, 3): + from collections.abc import MutableSet, MutableMapping, MutableSequence +else: + from collections import MutableSet, MutableMapping, MutableSequence _mutable_set_types += (MutableSet,) _mutable_mapping_types += (MutableMapping,) _mutable_sequence_types += (MutableSequence,) diff --git a/lib/spack/external/jinja2/tests.py b/lib/spack/external/jinja2/tests.py index 0adc3d4dbc..d5d6b5b33f 100644 --- a/lib/spack/external/jinja2/tests.py +++ b/lib/spack/external/jinja2/tests.py @@ -10,11 +10,16 @@ """ import operator import re -from collections import Mapping +import sys from jinja2.runtime import Undefined from jinja2._compat import text_type, string_types, integer_types import decimal +if sys.version_info >= (3, 3): + from collections.abc import 
Mapping +else: + from collections import Mapping + number_re = re.compile(r'^-?\d+(\.\d+)?$') regex_type = type(number_re) diff --git a/lib/spack/external/jinja2/utils.py b/lib/spack/external/jinja2/utils.py index 502a311c08..cff4e783a8 100644 --- a/lib/spack/external/jinja2/utils.py +++ b/lib/spack/external/jinja2/utils.py @@ -482,10 +482,14 @@ class LRUCache(object): # register the LRU cache as mutable mapping if possible try: - from collections import MutableMapping + from collections.abc import MutableMapping MutableMapping.register(LRUCache) except ImportError: - pass + try: + from collections import MutableMapping + MutableMapping.register(LRUCache) + except ImportError: + pass def select_autoescape(enabled_extensions=('html', 'htm', 'xml'), diff --git a/lib/spack/external/markupsafe/__init__.py b/lib/spack/external/markupsafe/__init__.py index 68dc85f612..506326f450 100644 --- a/lib/spack/external/markupsafe/__init__.py +++ b/lib/spack/external/markupsafe/__init__.py @@ -10,10 +10,15 @@ """ import re import string -from collections import Mapping +import sys from markupsafe._compat import text_type, string_types, int_types, \ unichr, iteritems, PY2 +if sys.version_info >= (3, 3): + from collections.abc import Mapping +else: + from collections import Mapping + __version__ = "1.0" __all__ = ['Markup', 'soft_unicode', 'escape', 'escape_silent'] diff --git a/lib/spack/external/ruamel/yaml/comments.py b/lib/spack/external/ruamel/yaml/comments.py index b8a5010ad8..a517072087 100644 --- a/lib/spack/external/ruamel/yaml/comments.py +++ b/lib/spack/external/ruamel/yaml/comments.py @@ -9,7 +9,12 @@ these are not really related, formatting could be factored out as a separate base """ -from collections import MutableSet +import sys + +if sys.version_info >= (3, 3): + from collections.abc import MutableSet +else: + from collections import MutableSet __all__ = ["CommentedSeq", "CommentedMap", "CommentedOrderedMap", "CommentedSet", 'comment_attrib', 'merge_attrib'] diff --git a/lib/spack/external/ruamel/yaml/compat.py b/lib/spack/external/ruamel/yaml/compat.py index a1778bef28..28a981dc43 100644 --- a/lib/spack/external/ruamel/yaml/compat.py +++ b/lib/spack/external/ruamel/yaml/compat.py @@ -12,9 +12,12 @@ try: from ruamel.ordereddict import ordereddict except: try: - from collections import OrderedDict + from collections.abc import OrderedDict except ImportError: - from ordereddict import OrderedDict + try: + from collections import OrderedDict + except ImportError: + from ordereddict import OrderedDict # to get the right name import ... 
as ordereddict doesn't do that class ordereddict(OrderedDict): diff --git a/lib/spack/external/ruamel/yaml/constructor.py b/lib/spack/external/ruamel/yaml/constructor.py index f809df4bf9..69ad0a74ac 100644 --- a/lib/spack/external/ruamel/yaml/constructor.py +++ b/lib/spack/external/ruamel/yaml/constructor.py @@ -3,7 +3,6 @@ from __future__ import absolute_import from __future__ import print_function -import collections import datetime import base64 import binascii @@ -26,6 +25,12 @@ except (ImportError, ValueError): # for Jython from ruamel.yaml.scalarstring import * # NOQA +if sys.version_info >= (3, 3): + from collections.abc import Hashable +else: + from collections import Hashable + + __all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor', 'ConstructorError', 'RoundTripConstructor'] @@ -163,7 +168,7 @@ class BaseConstructor(object): # keys can be list -> deep key = self.construct_object(key_node, deep=True) # lists are not hashable, but tuples are - if not isinstance(key, collections.Hashable): + if not isinstance(key, Hashable): if isinstance(key, list): key = tuple(key) if PY2: @@ -175,7 +180,7 @@ class BaseConstructor(object): "found unacceptable key (%s)" % exc, key_node.start_mark) else: - if not isinstance(key, collections.Hashable): + if not isinstance(key, Hashable): raise ConstructorError( "while constructing a mapping", node.start_mark, "found unhashable key", key_node.start_mark) @@ -959,7 +964,7 @@ class RoundTripConstructor(SafeConstructor): # keys can be list -> deep key = self.construct_object(key_node, deep=True) # lists are not hashable, but tuples are - if not isinstance(key, collections.Hashable): + if not isinstance(key, Hashable): if isinstance(key, list): key = tuple(key) if PY2: @@ -971,7 +976,7 @@ class RoundTripConstructor(SafeConstructor): "found unacceptable key (%s)" % exc, key_node.start_mark) else: - if not isinstance(key, collections.Hashable): + if not isinstance(key, Hashable): raise ConstructorError( "while constructing a mapping", node.start_mark, "found unhashable key", key_node.start_mark) @@ -1003,7 +1008,7 @@ class RoundTripConstructor(SafeConstructor): # keys can be list -> deep key = self.construct_object(key_node, deep=True) # lists are not hashable, but tuples are - if not isinstance(key, collections.Hashable): + if not isinstance(key, Hashable): if isinstance(key, list): key = tuple(key) if PY2: @@ -1015,7 +1020,7 @@ class RoundTripConstructor(SafeConstructor): "found unacceptable key (%s)" % exc, key_node.start_mark) else: - if not isinstance(key, collections.Hashable): + if not isinstance(key, Hashable): raise ConstructorError( "while constructing a mapping", node.start_mark, "found unhashable key", key_node.start_mark) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index d6579555ad..bc55d29be0 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -23,6 +23,12 @@ from llnl.util import tty from llnl.util.lang import dedupe, memoized from spack.util.executable import Executable + +if sys.version_info >= (3, 3): + from collections.abc import Sequence # novm +else: + from collections import Sequence + __all__ = [ 'FileFilter', 'FileList', @@ -1105,7 +1111,7 @@ def find(root, files, recursive=True): Parameters: root (str): The root directory to start searching from - files (str or collections.Sequence): Library name(s) to search for + files (str or Sequence): Library name(s) to search for recurse (bool, optional): if False search only root folder, if True descends 
top-down from the root. Defaults to True. @@ -1168,7 +1174,7 @@ def _find_non_recursive(root, search_files): # Utilities for libraries and headers -class FileList(collections.Sequence): +class FileList(Sequence): """Sequence of absolute paths to files. Provides a few convenience methods to manipulate file paths. @@ -1411,7 +1417,7 @@ def find_headers(headers, root, recursive=False): """ if isinstance(headers, six.string_types): headers = [headers] - elif not isinstance(headers, collections.Sequence): + elif not isinstance(headers, Sequence): message = '{0} expects a string or sequence of strings as the ' message += 'first argument [got {1} instead]' message = message.format(find_headers.__name__, type(headers)) @@ -1566,7 +1572,7 @@ def find_system_libraries(libraries, shared=True): """ if isinstance(libraries, six.string_types): libraries = [libraries] - elif not isinstance(libraries, collections.Sequence): + elif not isinstance(libraries, Sequence): message = '{0} expects a string or sequence of strings as the ' message += 'first argument [got {1} instead]' message = message.format(find_system_libraries.__name__, @@ -1620,7 +1626,7 @@ def find_libraries(libraries, root, shared=True, recursive=False): """ if isinstance(libraries, six.string_types): libraries = [libraries] - elif not isinstance(libraries, collections.Sequence): + elif not isinstance(libraries, Sequence): message = '{0} expects a string or sequence of strings as the ' message += 'first argument [got {1} instead]' message = message.format(find_libraries.__name__, type(libraries)) diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index 8d7d5e0767..471f90490f 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -9,13 +9,18 @@ import multiprocessing import os import re import functools -import collections import inspect from datetime import datetime, timedelta from six import string_types import sys +if sys.version_info >= (3, 3): + from collections.abc import Hashable, MutableMapping # novm +else: + from collections import Hashable, MutableMapping + + # Ignore emacs backups when listing modules ignore_modules = [r'^\.#', '~$'] @@ -189,7 +194,7 @@ def memoized(func): @functools.wraps(func) def _memoized_function(*args): - if not isinstance(args, collections.Hashable): + if not isinstance(args, Hashable): # Not hashable, so just call the function. return func(*args) @@ -264,7 +269,7 @@ def key_ordering(cls): @key_ordering -class HashableMap(collections.MutableMapping): +class HashableMap(MutableMapping): """This is a hashable, comparable dictionary. 
Hash is performed on a tuple of the values in the dictionary.""" diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index 41276b5b48..b7793a893f 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -28,10 +28,11 @@ The available directives are: """ -import collections import functools import os.path import re +import sys + from six import string_types import llnl.util.lang @@ -47,6 +48,13 @@ from spack.fetch_strategy import from_kwargs from spack.resource import Resource from spack.version import Version, VersionChecksumError + +if sys.version_info >= (3, 3): + from collections.abc import Sequence # novm +else: + from collections import Sequence + + __all__ = [] #: These are variant names used by Spack internally; packages can't use them @@ -202,7 +210,7 @@ class DirectiveMeta(type): if isinstance(dicts, string_types): dicts = (dicts, ) - if not isinstance(dicts, collections.Sequence): + if not isinstance(dicts, Sequence): message = "dicts arg must be list, tuple, or string. Found {0}" raise TypeError(message.format(type(dicts))) # Add the dictionary names if not already there @@ -243,7 +251,7 @@ class DirectiveMeta(type): # ...so if it is not a sequence make it so values = result - if not isinstance(values, collections.Sequence): + if not isinstance(values, Sequence): values = (values, ) DirectiveMeta._directives_to_be_executed.extend(values) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 18e08b3199..84753c0b25 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -40,6 +40,12 @@ import spack.variant import spack.version +if sys.version_info >= (3, 3): + from collections.abc import Sequence # novm +else: + from collections import Sequence + + class Timer(object): """Simple timer for timing phases of a solve""" def __init__(self): @@ -64,7 +70,7 @@ class Timer(object): def issequence(obj): if isinstance(obj, string_types): return False - return isinstance(obj, (collections.Sequence, types.GeneratorType)) + return isinstance(obj, (Sequence, types.GeneratorType)) def listify(args): diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index f0ae6a1431..609f05ca03 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -116,6 +116,13 @@ import spack.util.string import spack.variant as vt import spack.version as vn + +if sys.version_info >= (3, 3): + from collections.abc import Mapping # novm +else: + from collections import Mapping + + __all__ = [ 'Spec', 'parse', @@ -2120,7 +2127,7 @@ class Spec(object): # which likely means the spec was created with Spec.from_detection msg = ('cannot validate "{0}" since it was not created ' 'using Spec.from_detection'.format(self)) - assert isinstance(self.extra_attributes, collections.Mapping), msg + assert isinstance(self.extra_attributes, Mapping), msg # Validate the spec calling a package specific method validate_fn = getattr( diff --git a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py index 98fb1e68fe..12a3982f06 100644 --- a/lib/spack/spack/test/spec_yaml.py +++ b/lib/spack/spack/test/spec_yaml.py @@ -11,8 +11,7 @@ YAML format preserves DAG information in the spec. 
import ast import inspect import os - -from collections import Iterable, Mapping +import sys import pytest @@ -29,6 +28,12 @@ from spack.util.spack_yaml import syaml_dict from spack.util.mock_package import MockPackageMultiRepo +if sys.version_info >= (3, 3): + from collections.abc import Iterable, Mapping # novm +else: + from collections import Iterable, Mapping + + def check_yaml_round_trip(spec): yaml_text = spec.to_yaml() spec_from_yaml = Spec.from_yaml(yaml_text) diff --git a/lib/spack/spack/util/pattern.py b/lib/spack/spack/util/pattern.py index fcbb4e6baa..c0569f5f55 100644 --- a/lib/spack/spack/util/pattern.py +++ b/lib/spack/spack/util/pattern.py @@ -4,8 +4,14 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import inspect -import collections import functools +import sys + + +if sys.version_info >= (3, 3): + from collections.abc import MutableSequence # novm +else: + from collections import MutableSequence class Delegate(object): @@ -52,7 +58,7 @@ def composite(interface=None, method_list=None, container=list): # exception if it doesn't. The patched class returned by the decorator will # inherit from the container class to expose the interface needed to manage # objects composition - if not issubclass(container, collections.MutableSequence): + if not issubclass(container, MutableSequence): raise TypeError("Container must fulfill the MutableSequence contract") # Check if at least one of the 'interface' or the 'method_list' arguments diff --git a/lib/spack/spack/util/spack_yaml.py b/lib/spack/spack/util/spack_yaml.py index 40e66ee62c..4e61e02a4e 100644 --- a/lib/spack/spack/util/spack_yaml.py +++ b/lib/spack/spack/util/spack_yaml.py @@ -1,4 +1,4 @@ -# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -13,7 +13,7 @@ """ import ctypes -import collections +import sys from ordereddict_backport import OrderedDict from six import string_types, StringIO @@ -25,6 +25,13 @@ from llnl.util.tty.color import colorize, clen, cextra import spack.error + +if sys.version_info >= (3, 3): + from collections.abc import Mapping # novm +else: + from collections import Mapping + + # Only export load and dump __all__ = ['load', 'dump', 'SpackYAMLError'] @@ -343,7 +350,7 @@ def sorted_dict(dict_like): """ result = syaml_dict(sorted(dict_like.items())) for key, value in result.items(): - if isinstance(value, collections.Mapping): + if isinstance(value, Mapping): result[key] = sorted_dict(value) return result -- cgit v1.2.3-60-g2f50 From 94bb37c1070c1f032d805e0b7f7622cf736ada14 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 3 Feb 2021 19:12:03 +0100 Subject: concretizer: simplify "fact" method (#21148) The "fact" method before was dealing with multiple facts registered per call, which was used when we were emitting grounded rules from knowledge of the problem instance. Now that the encoding is changed we can simplify the method to deal only with a single fact per call. 
(cherry picked from commit ba42c36f00fe40c047121a32117018eb93e0c4b1) --- lib/spack/spack/solver/asp.py | 28 +++++----------------------- 1 file changed, 5 insertions(+), 23 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 84753c0b25..e36b5a3d3c 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -210,19 +210,6 @@ class Result(object): *sorted(str(symbol) for symbol in core)) -def _normalize(body): - """Accept an AspAnd object or a single Symbol and return a list of - symbols. - """ - if isinstance(body, clingo.Symbol): - args = [body] - elif hasattr(body, 'symbol'): - args = [body.symbol()] - else: - raise TypeError("Invalid typee: ", type(body)) - return args - - def _normalize_packages_yaml(packages_yaml): normalized_yaml = copy.copy(packages_yaml) for pkg_name in packages_yaml: @@ -280,19 +267,14 @@ class PyclingoDriver(object): def fact(self, head): """ASP fact (a rule without a body).""" - symbols = _normalize(head) - self.out.write("%s.\n" % ','.join(str(a) for a in symbols)) + symbol = head.symbol() if hasattr(head, 'symbol') else head - atoms = {} - for s in symbols: - atoms[s] = self.backend.add_atom(s) + self.out.write("%s.\n" % str(symbol)) - self.backend.add_rule( - [atoms[s] for s in symbols], [], choice=self.cores - ) + atom = self.backend.add_atom(symbol) + self.backend.add_rule([atom], [], choice=self.cores) if self.cores: - for s in symbols: - self.assumptions.append(atoms[s]) + self.assumptions.append(atom) def solve( self, solver_setup, specs, dump=None, nmodels=0, -- cgit v1.2.3-60-g2f50 From 8d131934345abc5f0ee61da45a74fa09c07195bb Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 23 Feb 2021 04:09:43 +0100 Subject: Improve error message for inconsistencies in package.py (#21811) * Improve error message for inconsistencies in package.py Sometimes directives refer to variants that do not exist. Make it such that: 1. The name of the variant 2. The name of the package which is supposed to have such variant 3. The name of the package making this assumption are all printed in the error message for easier debugging. 
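For illustration only (not part of this patch), the diagnostic now reads roughly as follows, using the mock packages added below (the new 'wrong-variant-in-depends-on' package declares depends_on('b+doesnotexist'), and package 'b' defines no such variant):

    # Sketch of the composed message; the package and variant names come from
    # the mock packages in this patch, the format strings from the asp.py hunk.
    variant, pkg, requestor = 'doesnotexist', 'b', 'wrong-variant-in-depends-on'
    msg = 'variant "{0}" not found in package "{1}"'.format(variant, pkg)
    msg += ' [required from package "{0}"]'.format(requestor)
    # Concretizing the requestor now raises a RuntimeError carrying all three
    # names, e.g.:
    #   variant "doesnotexist" not found in package "b"
    #   [required from package "wrong-variant-in-depends-on"]
    print(msg)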
* Add unit tests (cherry picked from commit 7226bd64dc3b46a1ed361f1e9d7fb4a2a5b65200) --- lib/spack/spack/solver/asp.py | 33 ++++++++++++++++++---- lib/spack/spack/test/concretize.py | 12 ++++++++ .../packages/wrong-variant-in-conflicts/package.py | 13 +++++++++ .../wrong-variant-in-depends-on/package.py | 13 +++++++++ 4 files changed, 66 insertions(+), 5 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/wrong-variant-in-conflicts/package.py create mode 100644 var/spack/repos/builtin.mock/packages/wrong-variant-in-depends-on/package.py diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index e36b5a3d3c..789a207d1e 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -647,11 +647,15 @@ class SpackSolverSetup(object): self.gen.fact(cond_fn(condition_id, pkg_name, dep_spec.name)) # conditions that trigger the condition - conditions = self.spec_clauses(named_cond, body=True) + conditions = self.checked_spec_clauses( + named_cond, body=True, required_from=pkg_name + ) for pred in conditions: self.gen.fact(require_fn(condition_id, pred.name, *pred.args)) - imposed_constraints = self.spec_clauses(dep_spec) + imposed_constraints = self.checked_spec_clauses( + dep_spec, required_from=pkg_name + ) for pred in imposed_constraints: # imposed "node"-like conditions are no-ops if pred.name in ("node", "virtual_node"): @@ -857,6 +861,20 @@ class SpackSolverSetup(object): self.gen.fact(fn.compiler_version_flag( compiler.name, compiler.version, name, flag)) + def checked_spec_clauses(self, *args, **kwargs): + """Wrap a call to spec clauses into a try/except block that raise + a comprehensible error message in case of failure. + """ + requestor = kwargs.pop('required_from', None) + try: + clauses = self.spec_clauses(*args, **kwargs) + except RuntimeError as exc: + msg = str(exc) + if requestor: + msg += ' [required from package "{0}"]'.format(requestor) + raise RuntimeError(msg) + return clauses + def spec_clauses(self, spec, body=False, transitive=True): """Return a list of clauses for a spec mandates are true. @@ -925,9 +943,14 @@ class SpackSolverSetup(object): # validate variant value reserved_names = spack.directives.reserved_names - if (not spec.virtual and vname not in reserved_names): - variant_def = spec.package.variants[vname] - variant_def.validate_or_raise(variant, spec.package) + if not spec.virtual and vname not in reserved_names: + try: + variant_def = spec.package.variants[vname] + except KeyError: + msg = 'variant "{0}" not found in package "{1}"' + raise RuntimeError(msg.format(vname, spec.name)) + else: + variant_def.validate_or_raise(variant, spec.package) clauses.append(f.variant_value(spec.name, vname, value)) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index dae162a12f..a54a65970f 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -1097,3 +1097,15 @@ class TestConcretize(object): # dependency type declared to infer that the dependency holds. 
s = Spec('test-dep-with-imposed-conditions').concretized() assert 'c' not in s + + @pytest.mark.parametrize('spec_str', [ + 'wrong-variant-in-conflicts', + 'wrong-variant-in-depends-on' + ]) + def test_error_message_for_inconsistent_variants(self, spec_str): + if spack.config.get('config:concretizer') == 'original': + pytest.xfail('Known failure of the original concretizer') + + s = Spec(spec_str) + with pytest.raises(RuntimeError, match='not found in package'): + s.concretize() diff --git a/var/spack/repos/builtin.mock/packages/wrong-variant-in-conflicts/package.py b/var/spack/repos/builtin.mock/packages/wrong-variant-in-conflicts/package.py new file mode 100644 index 0000000000..7a53904f60 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/wrong-variant-in-conflicts/package.py @@ -0,0 +1,13 @@ +# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class WrongVariantInConflicts(Package): + """This package has a wrong variant spelled in a conflict.""" + + homepage = "http://www.example.com" + url = "http://www.example.com/b-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + + conflicts('+foo', when='@1.0') diff --git a/var/spack/repos/builtin.mock/packages/wrong-variant-in-depends-on/package.py b/var/spack/repos/builtin.mock/packages/wrong-variant-in-depends-on/package.py new file mode 100644 index 0000000000..628a761c7c --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/wrong-variant-in-depends-on/package.py @@ -0,0 +1,13 @@ +# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class WrongVariantInDependsOn(Package): + """This package has a wrong variant spelled in a depends_on.""" + + homepage = "http://www.example.com" + url = "http://www.example.com/b-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + + depends_on('b+doesnotexist') -- cgit v1.2.3-60-g2f50 From 14e179398fc8dcfc58ff7f9b95506ccc9aab3359 Mon Sep 17 00:00:00 2001 From: Josh Essman <68349992+joshessman-llnl@users.noreply.github.com> Date: Tue, 23 Feb 2021 17:46:37 -0600 Subject: Updates to support clingo-cffi (#20657) * Support clingo when used with cffi Clingo recently merged in a new Python module option based on cffi. Compatibility with this module requires a few changes to spack - it does not automatically convert strings/ints/etc to Symbol and clingo.Symbol.string throws on failure. 
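As a rough illustration of both points (not code from this patch, and assuming only the behavior the asp.py hunks below rely on), the explicit Symbol wrapping and the guarded string access look like this:

    import clingo

    # Detection used by the patch: only the CFFI-based module has _rep.
    clingo_cffi = hasattr(clingo.Symbol, '_rep')

    def argify(arg):
        # clingo-cffi no longer coerces plain Python values, so wrap them.
        if isinstance(arg, bool):
            return clingo.String(str(arg))
        elif isinstance(arg, int):
            return clingo.Number(arg)
        return clingo.String(str(arg))

    def stringify(x):
        if clingo_cffi:
            try:
                return x.string          # raises if the symbol is not a String
            except RuntimeError:
                return str(x)
        return x.string or str(x)        # the older API returns None instead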
manually convert str/int to clingo.Symbol types catch stringify exceptions add job for clingo-cffi to Spack CI switch to potassco-vendored wheel for clingo-cffi CI on_unsat argument when cffi (cherry picked from commit 93ed1a410c4a202eab3a68769fd8c0d4ff8b1c8e) --- .github/workflows/linux_unit_tests.yaml | 51 +++++++++++++++++++++++++++++++++ lib/spack/spack/solver/asp.py | 28 ++++++++++++------ 2 files changed, 70 insertions(+), 9 deletions(-) diff --git a/.github/workflows/linux_unit_tests.yaml b/.github/workflows/linux_unit_tests.yaml index c87ea6e07a..5aa7b4877b 100644 --- a/.github/workflows/linux_unit_tests.yaml +++ b/.github/workflows/linux_unit_tests.yaml @@ -138,3 +138,54 @@ jobs: - uses: codecov/codecov-action@v1 with: flags: unittests,linux,clingo + clingo-cffi: + # Test for the clingo based solver using the CFFI package + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install System packages + run: | + sudo apt-get -y update + # Needed for unit tests + sudo apt-get install -y coreutils gfortran graphviz gnupg2 mercurial + sudo apt-get install -y ninja-build patchelf + # Needed for kcov + sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev + sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev + - name: Install Python packages + run: | + pip install --upgrade pip six setuptools codecov coverage cffi + pip install -i https://test.pypi.org/simple/ clingo-cffi + - name: Setup git configuration + run: | + # Need this for the git tests to succeed. + git --version + . .github/workflows/setup_git.sh + - name: Install kcov for bash script coverage + env: + KCOV_VERSION: 34 + run: | + KCOV_ROOT=$(mktemp -d) + wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz + tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz + mkdir -p ${KCOV_ROOT}/build + cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd - + make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install + - name: Run unit tests + run: | + whoami && echo PWD=$PWD && echo HOME=$HOME && echo SPACK_TEST_SOLVER=$SPACK_TEST_SOLVER + python -c "import clingo; print(hasattr(clingo.Symbol, '_rep'), clingo.__version__)" + . 
share/spack/setup-env.sh + spack compiler find + spack solve mpileaks%gcc + coverage run $(which spack) unit-test -v + coverage combine + coverage xml + - uses: codecov/codecov-action@v1 + with: + flags: unittests,linux,clingo diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 789a207d1e..ac2983cecc 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -18,6 +18,8 @@ import archspec.cpu try: import clingo + # There may be a better way to detect this + clingo_cffi = hasattr(clingo.Symbol, '_rep') except ImportError: clingo = None @@ -119,11 +121,11 @@ class AspFunction(AspObject): def symbol(self, positive=True): def argify(arg): if isinstance(arg, bool): - return str(arg) + return clingo.String(str(arg)) elif isinstance(arg, int): - return arg + return clingo.Number(arg) else: - return str(arg) + return clingo.String(str(arg)) return clingo.Function( self.name, [argify(arg) for arg in self.args], positive=positive) @@ -318,18 +320,26 @@ class PyclingoDriver(object): def on_model(model): models.append((model.cost, model.symbols(shown=True, terms=True))) - solve_result = self.control.solve( - assumptions=self.assumptions, - on_model=on_model, - on_core=cores.append - ) + solve_kwargs = {"assumptions": self.assumptions, + "on_model": on_model, + "on_core": cores.append} + if clingo_cffi: + solve_kwargs["on_unsat"] = cores.append + solve_result = self.control.solve(**solve_kwargs) timer.phase("solve") # once done, construct the solve result result.satisfiable = solve_result.satisfiable def stringify(x): - return x.string or str(x) + if clingo_cffi: + # Clingo w/ CFFI will throw an exception on failure + try: + return x.string + except RuntimeError: + return str(x) + else: + return x.string or str(x) if result.satisfiable: builder = SpecBuilder(specs) -- cgit v1.2.3-60-g2f50 From 2a4d2f905cdc4e65839fdcf0075e822d64d0f0a5 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 24 Feb 2021 21:33:14 +0100 Subject: Run clingo-cffi tests in a container (#21913) There clingo-cffi job has two issues to be solved: 1. It uses the default concretizer 2. It requires a package from https://test.pypi.org/simple/ The former can be fixed by setting the SPACK_TEST_SOLVER environment variable to "clingo". The latter though requires clingo-cffi to be pushed to a more stable package index (since https://test.pypi.org/simple/ is meant as a scratch version of PyPI that can be wiped at any time). For the time being run the tests in a container. Switch back to PyPI whenever a new official version of clingo will be released. 
--- .github/workflows/linux_unit_tests.yaml | 42 +++++---------------------------- 1 file changed, 6 insertions(+), 36 deletions(-) diff --git a/.github/workflows/linux_unit_tests.yaml b/.github/workflows/linux_unit_tests.yaml index 5aa7b4877b..5df0957659 100644 --- a/.github/workflows/linux_unit_tests.yaml +++ b/.github/workflows/linux_unit_tests.yaml @@ -139,47 +139,17 @@ jobs: with: flags: unittests,linux,clingo clingo-cffi: - # Test for the clingo based solver using the CFFI package + # Test for the clingo based solver (using clingo-cffi) runs-on: ubuntu-latest + container: spack/github-actions:clingo-cffi steps: - - uses: actions/checkout@v2 - with: - fetch-depth: 0 - - uses: actions/setup-python@v2 - with: - python-version: 3.8 - - name: Install System packages - run: | - sudo apt-get -y update - # Needed for unit tests - sudo apt-get install -y coreutils gfortran graphviz gnupg2 mercurial - sudo apt-get install -y ninja-build patchelf - # Needed for kcov - sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev - sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev - - name: Install Python packages - run: | - pip install --upgrade pip six setuptools codecov coverage cffi - pip install -i https://test.pypi.org/simple/ clingo-cffi - - name: Setup git configuration - run: | - # Need this for the git tests to succeed. - git --version - . .github/workflows/setup_git.sh - - name: Install kcov for bash script coverage - env: - KCOV_VERSION: 34 - run: | - KCOV_ROOT=$(mktemp -d) - wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz - tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz - mkdir -p ${KCOV_ROOT}/build - cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd - - make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install - name: Run unit tests run: | whoami && echo PWD=$PWD && echo HOME=$HOME && echo SPACK_TEST_SOLVER=$SPACK_TEST_SOLVER - python -c "import clingo; print(hasattr(clingo.Symbol, '_rep'), clingo.__version__)" + python3 -c "import clingo; print(hasattr(clingo.Symbol, '_rep'), clingo.__version__)" + git clone https://github.com/spack/spack.git && cd spack + git fetch origin ${{ github.ref }}:test-branch + git checkout test-branch . share/spack/setup-env.sh spack compiler find spack solve mpileaks%gcc -- cgit v1.2.3-60-g2f50 From 0678d5df90f8cb9207536d77f00cb8a0eee6a373 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Sun, 31 Jan 2021 11:31:57 +0100 Subject: repo: generalize "swap" context manager to also accept paths The method is now called "use_repositories" and makes it clear in the docstring that it accepts as arguments either Repo objects or paths. Since there was some duplication between this contextmanager and "use_repo" in the testing framework, remove the latter and use spack.repo.use_repositories across the entire code base. Make a few adjustment to MockPackageMultiRepo, since it was stating in the docstring that it was supposed to mock spack.repo.Repo and was instead mocking spack.repo.RepoPath. 
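A usage sketch for the renamed context manager (illustration only, not part of the patch; it assumes the signature introduced in the repo.py hunk below, which accepts either filesystem paths or already constructed Repo objects):

    import spack.paths
    import spack.repo

    # Swap in Spack's bundled mock package repository for the duration of the
    # block; a temporary RepoPath is constructed from the arguments.
    with spack.repo.use_repositories(spack.paths.mock_packages_path) as repos:
        # Inside the block spack.repo.path refers to this temporary RepoPath,
        # so package lookups resolve against the mock repository.
        print(len(repos.repos))
    # On exit the previous repository path and sys.meta_path entry are restored.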
(cherry picked from commit 1a8963b0f4c11c4b7ddd347e6cd95cdc68ddcbe0) --- lib/spack/spack/repo.py | 45 +++++++++++++++++++------------ lib/spack/spack/test/cmd/ci.py | 2 +- lib/spack/spack/test/cmd/env.py | 12 ++++----- lib/spack/spack/test/cmd/module.py | 4 +-- lib/spack/spack/test/cmd/pkg.py | 2 +- lib/spack/spack/test/concretize.py | 2 +- lib/spack/spack/test/conftest.py | 24 ++++++----------- lib/spack/spack/test/database.py | 12 ++++----- lib/spack/spack/test/directory_layout.py | 2 +- lib/spack/spack/test/installer.py | 4 +-- lib/spack/spack/test/package_sanity.py | 2 +- lib/spack/spack/test/spec_dag.py | 8 +++--- lib/spack/spack/test/spec_yaml.py | 2 +- lib/spack/spack/test/test_activations.py | 2 +- lib/spack/spack/test/util/mock_package.py | 2 +- lib/spack/spack/util/mock_package.py | 7 +++++ 16 files changed, 71 insertions(+), 61 deletions(-) diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py index 4af7b382f0..52b44e0120 100644 --- a/lib/spack/spack/repo.py +++ b/lib/spack/spack/repo.py @@ -1255,23 +1255,6 @@ def set_path(repo): return append -@contextlib.contextmanager -def swap(repo_path): - """Temporarily use another RepoPath.""" - global path - - # swap out _path for repo_path - saved = path - remove_from_meta = set_path(repo_path) - - yield - - # restore _path and sys.meta_path - if remove_from_meta: - sys.meta_path.remove(repo_path) - path = saved - - @contextlib.contextmanager def additional_repository(repository): """Adds temporarily a repository to the default one. @@ -1284,6 +1267,34 @@ def additional_repository(repository): path.remove(repository) +@contextlib.contextmanager +def use_repositories(*paths_and_repos): + """Use the repositories passed as arguments within the context manager. + + Args: + *paths_and_repos: paths to the repositories to be used, or + already constructed Repo objects + + Returns: + Corresponding RepoPath object + """ + global path + + # Construct a temporary RepoPath object from + temporary_repositories = RepoPath(*paths_and_repos) + + # Swap the current repository out + saved = path + remove_from_meta = set_path(temporary_repositories) + + yield temporary_repositories + + # Restore _path and sys.meta_path + if remove_from_meta: + sys.meta_path.remove(temporary_repositories) + path = saved + + class RepoError(spack.error.SpackError): """Superclass for repository-related errors.""" diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py index 4d8468c66c..16af0e99e6 100644 --- a/lib/spack/spack/test/cmd/ci.py +++ b/lib/spack/spack/test/cmd/ci.py @@ -73,7 +73,7 @@ and then 'd', 'b', and 'a' to be put in the next three stages, respectively. 
b = mock_repo.add_package('b', [d, e], [default, default]) mock_repo.add_package('a', [b, c], [default, default]) - with repo.swap(mock_repo): + with repo.use_repositories(mock_repo): spec_a = Spec('a') spec_a.concretize() diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index c2d75d9d1f..72f1b86347 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -330,7 +330,7 @@ def test_env_status_broken_view( # switch to a new repo that doesn't include the installed package # test that Spack detects the missing package and warns the user new_repo = MockPackageMultiRepo() - with spack.repo.swap(new_repo): + with spack.repo.use_repositories(new_repo): output = env('status') assert 'In environment test' in output assert 'Environment test includes out of date' in output @@ -351,7 +351,7 @@ def test_env_activate_broken_view( # switch to a new repo that doesn't include the installed package # test that Spack detects the missing package and fails gracefully new_repo = MockPackageMultiRepo() - with spack.repo.swap(new_repo): + with spack.repo.use_repositories(new_repo): with pytest.raises(SpackCommandError): env('activate', '--sh', 'test') @@ -929,7 +929,7 @@ def test_read_old_lock_and_write_new(tmpdir): y = mock_repo.add_package('y', [], []) mock_repo.add_package('x', [y], [build_only]) - with spack.repo.swap(mock_repo): + with spack.repo.use_repositories(mock_repo): x = Spec('x') x.concretize() @@ -960,7 +960,7 @@ def test_read_old_lock_creates_backup(tmpdir): mock_repo = MockPackageMultiRepo() y = mock_repo.add_package('y', [], []) - with spack.repo.swap(mock_repo): + with spack.repo.use_repositories(mock_repo): y = Spec('y') y.concretize() @@ -997,7 +997,7 @@ def test_indirect_build_dep(): pass setattr(mock_repo, 'dump_provenance', noop) - with spack.repo.swap(mock_repo): + with spack.repo.use_repositories(mock_repo): x_spec = Spec('x') x_concretized = x_spec.concretized() @@ -1038,7 +1038,7 @@ def test_store_different_build_deps(): pass setattr(mock_repo, 'dump_provenance', noop) - with spack.repo.swap(mock_repo): + with spack.repo.use_repositories(mock_repo): y_spec = Spec('y ^z@3') y_concretized = y_spec.concretized() diff --git a/lib/spack/spack/test/cmd/module.py b/lib/spack/spack/test/cmd/module.py index c3fe279306..9a34368fce 100644 --- a/lib/spack/spack/test/cmd/module.py +++ b/lib/spack/spack/test/cmd/module.py @@ -10,7 +10,7 @@ import pytest import spack.main import spack.modules -from spack.test.conftest import use_store, use_configuration, use_repo +from spack.test.conftest import use_store, use_configuration module = spack.main.SpackCommand('module') @@ -23,7 +23,7 @@ def ensure_module_files_are_there( module = spack.main.SpackCommand('module') with use_store(mock_store): with use_configuration(mock_configuration): - with use_repo(mock_repo_path): + with spack.repo.use_repositories(mock_repo_path): module('tcl', 'refresh', '-y') diff --git a/lib/spack/spack/test/cmd/pkg.py b/lib/spack/spack/test/cmd/pkg.py index ca9e3a1a3e..52127e8605 100644 --- a/lib/spack/spack/test/cmd/pkg.py +++ b/lib/spack/spack/test/cmd/pkg.py @@ -82,7 +82,7 @@ def mock_pkg_git_repo(tmpdir_factory): git('-c', 'commit.gpgsign=false', 'commit', '-m', 'change pkg-b, remove pkg-c, add pkg-d') - with spack.repo.swap(mock_repo): + with spack.repo.use_repositories(mock_repo): yield mock_repo_packages diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index a54a65970f..e5eaa7b7fb 100644 --- a/lib/spack/spack/test/concretize.py 
+++ b/lib/spack/spack/test/concretize.py @@ -304,7 +304,7 @@ class TestConcretize(object): barpkg = mock_repo.add_package('barpkg', [bazpkg], [default_dep]) mock_repo.add_package('foopkg', [barpkg], [default_dep]) - with spack.repo.swap(mock_repo): + with spack.repo.use_repositories(mock_repo): spec = Spec('foopkg %gcc@4.5.0 os=CNL target=nocona' + ' ^barpkg os=SuSE11 ^bazpkg os=be') spec.concretize() diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index d4f38f2c1b..f214c78c18 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -382,19 +382,12 @@ def use_store(store): spack.store.store = saved -@contextlib.contextmanager -def use_repo(repo): - """Context manager to swap out the global Spack repo path.""" - with spack.repo.swap(repo): - yield - - # # Test-specific fixtures # @pytest.fixture(scope='session') def mock_repo_path(): - yield spack.repo.RepoPath(spack.paths.mock_packages_path) + yield spack.repo.Repo(spack.paths.mock_packages_path) def _pkg_install_fn(pkg, spec, prefix): @@ -411,15 +404,15 @@ def mock_pkg_install(monkeypatch): @pytest.fixture(scope='function') def mock_packages(mock_repo_path, mock_pkg_install): """Use the 'builtin.mock' repository instead of 'builtin'""" - with use_repo(mock_repo_path): - yield mock_repo_path + with spack.repo.use_repositories(mock_repo_path) as mock_repo: + yield mock_repo @pytest.fixture(scope='function') def mutable_mock_repo(mock_repo_path): """Function-scoped mock packages, for tests that need to modify them.""" - mock_repo_path = spack.repo.RepoPath(spack.paths.mock_packages_path) - with use_repo(mock_repo_path): + mock_repo = spack.repo.Repo(spack.paths.mock_packages_path) + with spack.repo.use_repositories(mock_repo) as mock_repo_path: yield mock_repo_path @@ -644,7 +637,7 @@ def mock_store(tmpdir_factory, mock_repo_path, mock_configuration, if not os.path.exists(str(store_cache.join('.spack-db'))): with use_configuration(mock_configuration): with use_store(store): - with use_repo(mock_repo_path): + with spack.repo.use_repositories(mock_repo_path): _populate(store.db) store_path.copy(store_cache, mode=True, stat=True) @@ -674,7 +667,7 @@ def mutable_mock_store(tmpdir_factory, mock_repo_path, mock_configuration, if not os.path.exists(str(store_cache.join('.spack-db'))): with use_configuration(mock_configuration): with use_store(store): - with use_repo(mock_repo_path): + with spack.repo.use_repositories(mock_repo_path): _populate(store.db) store_path.copy(store_cache, mode=True, stat=True) @@ -1250,8 +1243,7 @@ repo: namespace: mock_test_repo """) - repo = spack.repo.RepoPath(str(repodir)) - with spack.repo.swap(repo): + with spack.repo.use_repositories(str(repodir)) as repo: yield repo, repodir shutil.rmtree(str(repodir)) diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index 4682c9850d..8e0eac7cc0 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -81,7 +81,7 @@ def test_installed_upstream(upstream_and_downstream_db): y = mock_repo.add_package('y', [z], [default]) mock_repo.add_package('w', [x, y], [default, default]) - with spack.repo.swap(mock_repo): + with spack.repo.use_repositories(mock_repo): spec = spack.spec.Spec('w') spec.concretize() @@ -122,7 +122,7 @@ def test_removed_upstream_dep(upstream_and_downstream_db): z = mock_repo.add_package('z', [], []) mock_repo.add_package('y', [z], [default]) - with spack.repo.swap(mock_repo): + with spack.repo.use_repositories(mock_repo): spec = 
spack.spec.Spec('y') spec.concretize() @@ -155,7 +155,7 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db): mock_repo = MockPackageMultiRepo() mock_repo.add_package('x', [], []) - with spack.repo.swap(mock_repo): + with spack.repo.use_repositories(mock_repo): spec = spack.spec.Spec('x') spec.concretize() @@ -197,7 +197,7 @@ def test_cannot_write_upstream(tmpdir_factory, test_store, gen_mock_layout): upstream_dbs = spack.store._construct_upstream_dbs_from_install_roots( [roots[1]], _test=True) - with spack.repo.swap(mock_repo): + with spack.repo.use_repositories(mock_repo): spec = spack.spec.Spec('x') spec.concretize() @@ -216,7 +216,7 @@ def test_recursive_upstream_dbs(tmpdir_factory, test_store, gen_mock_layout): y = mock_repo.add_package('y', [z], [default]) mock_repo.add_package('x', [y], [default]) - with spack.repo.swap(mock_repo): + with spack.repo.use_repositories(mock_repo): spec = spack.spec.Spec('x') spec.concretize() db_c = spack.database.Database(roots[2]) @@ -694,7 +694,7 @@ def test_115_reindex_with_packages_not_in_repo(mutable_database): # Dont add any package definitions to this repository, the idea is that # packages should not have to be defined in the repository once they # are installed - with spack.repo.swap(MockPackageMultiRepo()): + with spack.repo.use_repositories(MockPackageMultiRepo()): spack.store.store.reindex() _check_db_sanity(mutable_database) diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py index 3a144621eb..3cb976a077 100644 --- a/lib/spack/spack/test/directory_layout.py +++ b/lib/spack/spack/test/directory_layout.py @@ -194,7 +194,7 @@ def test_handle_unknown_package(layout_and_dir, config, mock_packages): layout.create_install_directory(spec) installed_specs[spec] = layout.path_for_spec(spec) - with spack.repo.swap(mock_db): + with spack.repo.use_repositories(mock_db): # Now check that even without the package files, we know # enough to read a spec from the spec file. 
for spec, path in installed_specs.items(): diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py index 3bf818544e..590c4a4a1a 100644 --- a/lib/spack/spack/test/installer.py +++ b/lib/spack/spack/test/installer.py @@ -474,14 +474,14 @@ def test_packages_needed_to_bootstrap_compiler_packages(install_mockery, assert packages -def test_dump_packages_deps_ok(install_mockery, tmpdir, mock_repo_path): +def test_dump_packages_deps_ok(install_mockery, tmpdir, mock_packages): """Test happy path for dump_packages with dependencies.""" spec_name = 'simple-inheritance' spec = spack.spec.Spec(spec_name).concretized() inst.dump_packages(spec, str(tmpdir)) - repo = mock_repo_path.repos[0] + repo = mock_packages.repos[0] dest_pkg = repo.filename_for_package_name(spec_name) assert os.path.isfile(dest_pkg) diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py index d50169a1a4..ded4d1c485 100644 --- a/lib/spack/spack/test/package_sanity.py +++ b/lib/spack/spack/test/package_sanity.py @@ -73,7 +73,7 @@ def test_repo_getpkg_names_and_classes(): def test_get_all_mock_packages(): """Get the mock packages once each too.""" db = spack.repo.RepoPath(spack.paths.mock_packages_path) - with spack.repo.swap(db): + with spack.repo.use_repositories(db): check_repo() diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index 2ac5bec4d9..854495988b 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -75,7 +75,7 @@ w->y deptypes are (link, build), w->x and y->z deptypes are (test) y = mock_repo.add_package('y', [z], [test_only]) w = mock_repo.add_package('w', [x, y], [test_only, default]) - with spack.repo.swap(mock_repo): + with spack.repo.use_repositories(mock_repo): spec = Spec('w') spec.concretize(tests=(w.name,)) @@ -114,7 +114,7 @@ def test_installed_deps(): b = mock_repo.add_package('b', [d, e], [default, default]) mock_repo.add_package('a', [b, c], [default, default]) - with spack.repo.swap(mock_repo): + with spack.repo.use_repositories(mock_repo): c_spec = Spec('c') c_spec.concretize() assert c_spec['d'].version == spack.version.Version('2') @@ -143,7 +143,7 @@ def test_specify_preinstalled_dep(): b = mock_repo.add_package('b', [c], [default]) mock_repo.add_package('a', [b], [default]) - with spack.repo.swap(mock_repo): + with spack.repo.use_repositories(mock_repo): b_spec = Spec('b') b_spec.concretize() for spec in b_spec.traverse(): @@ -186,7 +186,7 @@ def test_conditional_dep_with_user_constraints(spec_str, expr_str, expected): } mock_repo.add_package('x', [y], [default], conditions=x_on_y_conditions) - with spack.repo.swap(mock_repo): + with spack.repo.use_repositories(mock_repo): spec = Spec(spec_str) spec.concretize() diff --git a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py index 12a3982f06..b32fe09e03 100644 --- a/lib/spack/spack/test/spec_yaml.py +++ b/lib/spack/spack/test/spec_yaml.py @@ -315,7 +315,7 @@ def test_save_dependency_spec_yamls_subset(tmpdir, config): b = mock_repo.add_package('b', [d, e], [default, default]) mock_repo.add_package('a', [b, c], [default, default]) - with repo.swap(mock_repo): + with repo.use_repositories(mock_repo): spec_a = Spec('a') spec_a.concretize() b_spec = spec_a['b'] diff --git a/lib/spack/spack/test/test_activations.py b/lib/spack/spack/test/test_activations.py index d1780b8963..e8ce6c55ed 100644 --- a/lib/spack/spack/test/test_activations.py +++ b/lib/spack/spack/test/test_activations.py @@ -54,7 +54,7 @@ def 
builtin_and_mock_packages(): repo_dirs = [spack.paths.packages_path, spack.paths.mock_packages_path] path = RepoPath(*repo_dirs) - with spack.repo.swap(path): + with spack.repo.use_repositories(path): yield diff --git a/lib/spack/spack/test/util/mock_package.py b/lib/spack/spack/test/util/mock_package.py index 376ac581bd..cca55bb534 100644 --- a/lib/spack/spack/test/util/mock_package.py +++ b/lib/spack/spack/test/util/mock_package.py @@ -15,7 +15,7 @@ def test_mock_package_possible_dependencies(): b = mock_repo.add_package('b', [d]) a = mock_repo.add_package('a', [b, c]) - with spack.repo.swap(mock_repo): + with spack.repo.use_repositories(mock_repo): assert set(a.possible_dependencies()) == set(['a', 'b', 'c', 'd', 'e']) assert set(b.possible_dependencies()) == set(['b', 'd', 'e']) assert set(c.possible_dependencies()) == set(['c', 'd', 'e']) diff --git a/lib/spack/spack/util/mock_package.py b/lib/spack/spack/util/mock_package.py index 5286b50464..734a93c469 100644 --- a/lib/spack/spack/util/mock_package.py +++ b/lib/spack/spack/util/mock_package.py @@ -8,6 +8,7 @@ import ordereddict_backport import spack.util.naming +import spack.provider_index from spack.dependency import Dependency from spack.spec import Spec from spack.version import Version @@ -80,6 +81,8 @@ class MockPackageMultiRepo(object): def __init__(self): self.spec_to_pkg = {} + self.namespace = '' + self.full_namespace = 'spack.pkg.mock' def get(self, spec): if not isinstance(spec, spack.spec.Spec): @@ -171,3 +174,7 @@ class MockPackageMultiRepo(object): self.spec_to_pkg["mockrepo." + name] = mock_package return mock_package + + @property + def provider_index(self): + return spack.provider_index.ProviderIndex() -- cgit v1.2.3-60-g2f50 From 4e5e1e8f35c3a01fa530a21a39fa7274b54a3989 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 1 Feb 2021 14:55:45 +0100 Subject: Move context manager to swap the current store into spack.store The context manager can be used to swap the current store temporarily, for any use case that may need it. (cherry picked from commit cb2c233a97073f8c5d89581ee2a2401fef5f878d) --- lib/spack/spack/store.py | 26 ++++++++++++++++++++++++++ lib/spack/spack/test/cmd/module.py | 5 +++-- lib/spack/spack/test/conftest.py | 16 +++------------- 3 files changed, 32 insertions(+), 15 deletions(-) diff --git a/lib/spack/spack/store.py b/lib/spack/spack/store.py index 779a5531a4..6040145b64 100644 --- a/lib/spack/spack/store.py +++ b/lib/spack/spack/store.py @@ -23,6 +23,7 @@ install trees to define their own layouts with some per-tree configuration. """ +import contextlib import os import re import six @@ -227,3 +228,28 @@ def _construct_upstream_dbs_from_install_roots( accumulated_upstream_dbs.insert(0, next_db) return accumulated_upstream_dbs + + +@contextlib.contextmanager +def use_store(store_or_path): + """Use the store passed as argument within the context manager. 
+ + Args: + store_or_path: either a Store object ot a path to where the store resides + + Returns: + Store object associated with the context manager's store + """ + global store + + # Normalize input arguments + temporary_store = store_or_path + if not isinstance(store_or_path, Store): + temporary_store = Store(store_or_path) + + # Swap the store with the one just constructed and return it + original_store, store = store, temporary_store + yield temporary_store + + # Restore the original store + store = original_store diff --git a/lib/spack/spack/test/cmd/module.py b/lib/spack/spack/test/cmd/module.py index 9a34368fce..a488c47d1c 100644 --- a/lib/spack/spack/test/cmd/module.py +++ b/lib/spack/spack/test/cmd/module.py @@ -10,7 +10,8 @@ import pytest import spack.main import spack.modules -from spack.test.conftest import use_store, use_configuration +import spack.store +from spack.test.conftest import use_configuration module = spack.main.SpackCommand('module') @@ -21,7 +22,7 @@ def ensure_module_files_are_there( mock_repo_path, mock_store, mock_configuration): """Generate module files for module tests.""" module = spack.main.SpackCommand('module') - with use_store(mock_store): + with spack.store.use_store(mock_store): with use_configuration(mock_configuration): with spack.repo.use_repositories(mock_repo_path): module('tcl', 'refresh', '-y') diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index f214c78c18..153bee128f 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -35,6 +35,7 @@ import spack.paths import spack.platforms.test import spack.repo import spack.stage +import spack.store import spack.util.executable import spack.util.gpg import spack.subprocess_context @@ -373,15 +374,6 @@ def use_configuration(config): spack.compilers._cache_config_file = saved_compiler_cache -@contextlib.contextmanager -def use_store(store): - """Context manager to swap out the global Spack store.""" - saved = spack.store.store - spack.store.store = store - yield - spack.store.store = saved - - # # Test-specific fixtures # @@ -631,12 +623,11 @@ def mock_store(tmpdir_factory, mock_repo_path, mock_configuration, """ store_path, store_cache = _store_dir_and_cache - store = spack.store.Store(str(store_path)) # If the cache does not exist populate the store and create it if not os.path.exists(str(store_cache.join('.spack-db'))): with use_configuration(mock_configuration): - with use_store(store): + with spack.store.use_store(str(store_path)) as store: with spack.repo.use_repositories(mock_repo_path): _populate(store.db) store_path.copy(store_cache, mode=True, stat=True) @@ -661,12 +652,11 @@ def mutable_mock_store(tmpdir_factory, mock_repo_path, mock_configuration, """ store_path, store_cache = _store_dir_and_cache - store = spack.store.Store(str(store_path)) # If the cache does not exist populate the store and create it if not os.path.exists(str(store_cache.join('.spack-db'))): with use_configuration(mock_configuration): - with use_store(store): + with spack.store.use_store(str(store_path)) as store: with spack.repo.use_repositories(mock_repo_path): _populate(store.db) store_path.copy(store_cache, mode=True, stat=True) -- cgit v1.2.3-60-g2f50 From f30fc6cd33c8c201c80dd5d424d923d579f5e39b Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 1 Feb 2021 17:27:00 +0100 Subject: Move context manager to swap the current configuration into spack.config The context manager can be used to swap the current configuration temporarily, for any use 
case that may need it. (cherry picked from commit 553d37a6d62b05f15986a702394f67486fa44e0e) --- lib/spack/spack/config.py | 67 ++++++++++++++++++++++++++++--------- lib/spack/spack/test/cmd/module.py | 7 ++-- lib/spack/spack/test/conftest.py | 68 +++++++++++++------------------------- 3 files changed, 78 insertions(+), 64 deletions(-) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 84d8e8ca3f..6067d65fff 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -29,6 +29,7 @@ schemas are in submodules of :py:mod:`spack.schema`. """ import collections +import contextlib import copy import functools import os @@ -48,6 +49,7 @@ from llnl.util.filesystem import mkdirp import spack.paths import spack.architecture +import spack.compilers import spack.schema import spack.schema.compilers import spack.schema.mirrors @@ -803,22 +805,6 @@ def _config(): config = llnl.util.lang.Singleton(_config) -def replace_config(configuration): - """Replace the current global configuration with the instance passed as - argument. - - Args: - configuration (Configuration): the new configuration to be used. - - Returns: - The old configuration that has been removed - """ - global config - config.clear_caches(), configuration.clear_caches() - old_config, config = config, configuration - return old_config - - def get(path, default=None, scope=None): """Module-level wrapper for ``Configuration.get()``.""" return config.get(path, default, scope) @@ -1133,6 +1119,55 @@ def ensure_latest_format_fn(section): return update_fn +@contextlib.contextmanager +def use_configuration(*scopes_or_paths): + """Use the configuration scopes passed as arguments within the + context manager. + + Args: + *scopes_or_paths: scope objects or paths to be used + + Returns: + Configuration object associated with the scopes passed as arguments + """ + global config + + # Normalize input and construct a Configuration object + configuration = _config_from(scopes_or_paths) + config.clear_caches(), configuration.clear_caches() + + # Save and clear the current compiler cache + saved_compiler_cache = spack.compilers._cache_config_file + spack.compilers._cache_config_file = [] + + saved_config, config = config, configuration + + yield configuration + + # Restore previous config files + spack.compilers._cache_config_file = saved_compiler_cache + config = saved_config + + +@llnl.util.lang.memoized +def _config_from(scopes_or_paths): + scopes = [] + for scope_or_path in scopes_or_paths: + # If we have a config scope we are already done + if isinstance(scope_or_path, ConfigScope): + scopes.append(scope_or_path) + continue + + # Otherwise we need to construct it + path = os.path.normpath(scope_or_path) + assert os.path.isdir(path), '"{0}" must be a directory'.format(path) + name = os.path.basename(path) + scopes.append(ConfigScope(name, path)) + + configuration = Configuration(*scopes) + return configuration + + class ConfigError(SpackError): """Superclass for all Spack config related errors.""" diff --git a/lib/spack/spack/test/cmd/module.py b/lib/spack/spack/test/cmd/module.py index a488c47d1c..8be6c129c7 100644 --- a/lib/spack/spack/test/cmd/module.py +++ b/lib/spack/spack/test/cmd/module.py @@ -8,10 +8,10 @@ import re import pytest +import spack.config import spack.main import spack.modules import spack.store -from spack.test.conftest import use_configuration module = spack.main.SpackCommand('module') @@ -19,11 +19,12 @@ module = spack.main.SpackCommand('module') #: make sure module files are generated for all the 
tests here @pytest.fixture(scope='module', autouse=True) def ensure_module_files_are_there( - mock_repo_path, mock_store, mock_configuration): + mock_repo_path, mock_store, mock_configuration_scopes +): """Generate module files for module tests.""" module = spack.main.SpackCommand('module') with spack.store.use_store(mock_store): - with use_configuration(mock_configuration): + with spack.config.use_configuration(*mock_configuration_scopes): with spack.repo.use_repositories(mock_repo_path): module('tcl', 'refresh', '-y') diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 153bee128f..5b1e7b83e6 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -4,7 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import collections -import contextlib import errno import inspect import itertools @@ -336,7 +335,7 @@ spack.config.config = spack.config._config() # -# Context managers used by fixtures +# Note on context managers used by fixtures # # Because these context managers modify global state, they should really # ONLY be used persistently (i.e., around yield statements) in @@ -357,23 +356,6 @@ spack.config.config = spack.config._config() # *USE*, or things can get really confusing. # -@contextlib.contextmanager -def use_configuration(config): - """Context manager to swap out the global Spack configuration.""" - saved = spack.config.replace_config(config) - - # Avoid using real spack configuration that has been cached by other - # tests, and avoid polluting the cache with spack test configuration - # (including modified configuration) - saved_compiler_cache = spack.compilers._cache_config_file - spack.compilers._cache_config_file = [] - - yield - - spack.config.replace_config(saved) - spack.compilers._cache_config_file = saved_compiler_cache - - # # Test-specific fixtures # @@ -430,9 +412,7 @@ def default_config(): This ensures we can test the real default configuration without having tests fail when the user overrides the defaults that we test against.""" defaults_path = os.path.join(spack.paths.etc_path, 'spack', 'defaults') - defaults_scope = spack.config.ConfigScope('defaults', defaults_path) - defaults_config = spack.config.Configuration(defaults_scope) - with use_configuration(defaults_config): + with spack.config.use_configuration(defaults_path) as defaults_config: yield defaults_config @@ -508,7 +488,7 @@ def configuration_dir(tmpdir_factory, linux_os): @pytest.fixture(scope='session') -def mock_configuration(configuration_dir): +def mock_configuration_scopes(configuration_dir): """Create a persistent Configuration object from the configuration_dir.""" defaults = spack.config.InternalConfigScope( '_builtin', spack.config.config_defaults @@ -519,14 +499,14 @@ def mock_configuration(configuration_dir): for name in ['site', 'system', 'user']] test_scopes.append(spack.config.InternalConfigScope('command_line')) - yield spack.config.Configuration(*test_scopes) + yield test_scopes @pytest.fixture(scope='function') -def config(mock_configuration): +def config(mock_configuration_scopes): """This fixture activates/deactivates the mock configuration.""" - with use_configuration(mock_configuration): - yield mock_configuration + with spack.config.use_configuration(*mock_configuration_scopes) as config: + yield config @pytest.fixture(scope='function') @@ -535,11 +515,10 @@ def mutable_config(tmpdir_factory, configuration_dir): mutable_dir = tmpdir_factory.mktemp('mutable_config').join('tmp') configuration_dir.copy(mutable_dir) - cfg 
= spack.config.Configuration( - *[spack.config.ConfigScope(name, str(mutable_dir.join(name))) - for name in ['site', 'system', 'user']]) + scopes = [spack.config.ConfigScope(name, str(mutable_dir.join(name))) + for name in ['site', 'system', 'user']] - with use_configuration(cfg): + with spack.config.use_configuration(*scopes) as cfg: yield cfg @@ -547,23 +526,20 @@ def mutable_config(tmpdir_factory, configuration_dir): def mutable_empty_config(tmpdir_factory, configuration_dir): """Empty configuration that can be modified by the tests.""" mutable_dir = tmpdir_factory.mktemp('mutable_config').join('tmp') + scopes = [spack.config.ConfigScope(name, str(mutable_dir.join(name))) + for name in ['site', 'system', 'user']] - cfg = spack.config.Configuration( - *[spack.config.ConfigScope(name, str(mutable_dir.join(name))) - for name in ['site', 'system', 'user']]) - - with use_configuration(cfg): + with spack.config.use_configuration(*scopes) as cfg: yield cfg @pytest.fixture() def mock_low_high_config(tmpdir): """Mocks two configuration scopes: 'low' and 'high'.""" - config = spack.config.Configuration( - *[spack.config.ConfigScope(name, str(tmpdir.join(name))) - for name in ['low', 'high']]) + scopes = [spack.config.ConfigScope(name, str(tmpdir.join(name))) + for name in ['low', 'high']] - with use_configuration(config): + with spack.config.use_configuration(*scopes) as config: yield config @@ -612,7 +588,7 @@ def _store_dir_and_cache(tmpdir_factory): @pytest.fixture(scope='session') -def mock_store(tmpdir_factory, mock_repo_path, mock_configuration, +def mock_store(tmpdir_factory, mock_repo_path, mock_configuration_scopes, _store_dir_and_cache): """Creates a read-only mock database with some packages installed note that the ref count for dyninst here will be 3, as it's recycled @@ -626,7 +602,7 @@ def mock_store(tmpdir_factory, mock_repo_path, mock_configuration, # If the cache does not exist populate the store and create it if not os.path.exists(str(store_cache.join('.spack-db'))): - with use_configuration(mock_configuration): + with spack.config.use_configuration(*mock_configuration_scopes): with spack.store.use_store(str(store_path)) as store: with spack.repo.use_repositories(mock_repo_path): _populate(store.db) @@ -641,8 +617,10 @@ def mock_store(tmpdir_factory, mock_repo_path, mock_configuration, @pytest.fixture(scope='function') -def mutable_mock_store(tmpdir_factory, mock_repo_path, mock_configuration, - _store_dir_and_cache): +def mutable_mock_store( + tmpdir_factory, mock_repo_path, mock_configuration_scopes, + _store_dir_and_cache +): """Creates a read-only mock database with some packages installed note that the ref count for dyninst here will be 3, as it's recycled across each install. 
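Taken together, the new use_configuration, use_store and use_repositories context managers let a fixture assemble a fully isolated Spack state without touching module globals by hand. A minimal sketch of the intended nesting (the paths below are placeholders, not real fixture values):

    import spack.config
    import spack.repo
    import spack.store

    # Placeholder directories; real tests obtain these from fixtures such as
    # mock_configuration_scopes, _store_dir_and_cache and mock_repo_path.
    # Each configuration scope path must be an existing directory.
    scope_dirs = ['/tmp/site', '/tmp/system', '/tmp/user']
    with spack.config.use_configuration(*scope_dirs) as config:
        with spack.store.use_store('/tmp/store') as store:
            with spack.repo.use_repositories('/tmp/mock-repo'):
                pass  # globals are swapped inside the block and restored on exit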
@@ -655,7 +633,7 @@ def mutable_mock_store(tmpdir_factory, mock_repo_path, mock_configuration, # If the cache does not exist populate the store and create it if not os.path.exists(str(store_cache.join('.spack-db'))): - with use_configuration(mock_configuration): + with spack.config.use_configuration(*mock_configuration_scopes): with spack.store.use_store(str(store_path)) as store: with spack.repo.use_repositories(mock_repo_path): _populate(store.db) -- cgit v1.2.3-60-g2f50 From 095ace90287e065c37daaca549d4de6f34b5674d Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Tue, 5 Jan 2021 12:27:13 -0800 Subject: bugfix for target adjustments on target ranges (#20537) (cherry picked from commit 61c1b71d38e62a5af81b3b7b8a8d12b954d99f0a) --- lib/spack/spack/concretize.py | 19 ++++++++++++------- lib/spack/spack/test/concretize.py | 11 ++++++----- lib/spack/spack/test/conftest.py | 5 ++--- 3 files changed, 20 insertions(+), 15 deletions(-) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 3c3801bf5b..7e180eb91e 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -578,10 +578,14 @@ class Concretizer(object): True if spec was modified, False otherwise """ # To minimize the impact on performance this function will attempt - # to adjust the target only at the very first call. It will just - # return False on subsequent calls. The way this is achieved is by - # initializing a generator and making this function return the next - # answer. + # to adjust the target only at the very first call once necessary + # information is set. It will just return False on subsequent calls. + # The way this is achieved is by initializing a generator and making + # this function return the next answer. + if not (spec.architecture and spec.architecture.concrete): + # Not ready, but keep going because we have work to do later + return True + def _make_only_one_call(spec): yield self._adjust_target(spec) while True: @@ -619,9 +623,10 @@ class Concretizer(object): if PackagePrefs.has_preferred_targets(spec.name): default_target = self.target_from_package_preferences(spec) - if current_target != default_target or \ - (self.abstract_spec.architecture is not None and - self.abstract_spec.architecture.target is not None): + if current_target != default_target or ( + self.abstract_spec and + self.abstract_spec.architecture and + self.abstract_spec.architecture.concrete): return False try: diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index e5eaa7b7fb..2220ab9eb4 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -688,13 +688,14 @@ class TestConcretize(object): with pytest.raises(spack.error.SpackError): Spec(spec).concretized() + # Include targets to prevent regression on 20537 @pytest.mark.parametrize('spec, best_achievable', [ - ('mpileaks%gcc@4.4.7', 'core2'), - ('mpileaks%gcc@4.8', 'haswell'), - ('mpileaks%gcc@5.3.0', 'broadwell'), - ('mpileaks%apple-clang@5.1.0', 'x86_64') + ('mpileaks%gcc@4.4.7 target=x86_64:', 'core2'), + ('mpileaks%gcc@4.8 target=x86_64:', 'haswell'), + ('mpileaks%gcc@5.3.0 target=x86_64:', 'broadwell'), + ('mpileaks%apple-clang@5.1.0 target=x86_64:', 'x86_64') ]) - @pytest.mark.regression('13361') + @pytest.mark.regression('13361', '20537') def test_adjusting_default_target_based_on_compiler( self, spec, best_achievable, current_host, mock_targets ): diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 5b1e7b83e6..18a7924c9e 100644 
--- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -434,9 +434,8 @@ def mock_uarch_configuration(mock_uarch_json): with open(mock_uarch_json) as f: return json.load(f) - targets_json = archspec.cpu.schema.LazyDictionary(load_json) - targets = archspec.cpu.microarchitecture.LazyDictionary( - archspec.cpu.microarchitecture._known_microarchitectures) + targets_json = load_json() + targets = archspec.cpu.microarchitecture._known_microarchitectures() yield targets_json, targets -- cgit v1.2.3-60-g2f50 From 2a5f46d8d33ea42ec506544d7727843073398b11 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 2 Feb 2021 09:57:09 +0100 Subject: Added a context manager to swap architectures This solves a few FIXMEs in conftest.py, where we were manipulating globals and seeing side effects prior to registering fixtures. This commit solves the FIXMEs, but introduces a performance regression on tests that may need to be investigated (cherry picked from commit 4558dc06e21e01ab07a43737b8cb99d1d69abb5d) --- lib/spack/spack/architecture.py | 58 ++++++++++++++++++++++++++++++++-- lib/spack/spack/test/architecture.py | 35 ++++++++++++-------- lib/spack/spack/test/cmd/extensions.py | 2 +- lib/spack/spack/test/cmd/undevelop.py | 4 +-- lib/spack/spack/test/conftest.py | 25 ++++++--------- 5 files changed, 89 insertions(+), 35 deletions(-) diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py index c0930fe2d0..ece9040a7d 100644 --- a/lib/spack/spack/architecture.py +++ b/lib/spack/spack/architecture.py @@ -56,6 +56,7 @@ set. The user can set the front-end and back-end operating setting by the class attributes front_os and back_os. The operating system as described earlier, will be responsible for compiler detection. """ +import contextlib import functools import inspect import warnings @@ -67,6 +68,8 @@ import llnl.util.tty as tty from llnl.util.lang import memoized, list_modules, key_ordering import spack.compiler +import spack.compilers +import spack.config import spack.paths import spack.error as serr import spack.util.executable @@ -491,7 +494,7 @@ def arch_for_spec(arch_spec): @memoized -def all_platforms(): +def _all_platforms(): classes = [] mod_path = spack.paths.platform_path parent_module = "spack.platforms" @@ -512,7 +515,7 @@ def all_platforms(): @memoized -def platform(): +def _platform(): """Detects the platform for this machine. Gather a list of all available subclasses of platforms. @@ -521,7 +524,7 @@ def platform(): a file path (/opt/cray...) """ # Try to create a Platform object using the config file FIRST - platform_list = all_platforms() + platform_list = _all_platforms() platform_list.sort(key=lambda a: a.priority) for platform_cls in platform_list: @@ -529,6 +532,19 @@ def platform(): return platform_cls() +#: The "real" platform of the host running Spack. This should not be changed +#: by any method and is here as a convenient way to refer to the host platform. +real_platform = _platform + +#: The current platform used by Spack. May be swapped by the use_platform +#: context manager. +platform = _platform + +#: The list of all platform classes. May be swapped by the use_platform +#: context manager. +all_platforms = _all_platforms + + @memoized def default_arch(): """Default ``Arch`` object for this machine. 
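The context manager itself is added in the next hunk; once in place, the swap is meant to be used roughly as in this sketch, built on the mock Test platform the test suite already ships:

    import spack.architecture
    import spack.platforms.test

    # Swap the detected platform for the mock one; the original platform()
    # and all_platforms() callables are restored when the block exits.
    with spack.architecture.use_platform(spack.platforms.test.Test()):
        assert isinstance(spack.architecture.platform(), spack.platforms.test.Test)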
@@ -563,3 +579,39 @@ def compatible_sys_types(): arch = Arch(platform(), 'default_os', target) compatible_archs.append(str(arch)) return compatible_archs + + +class _PickleableCallable(object): + """Class used to pickle a callable that may substitute either + _platform or _all_platforms. Lambda or nested functions are + not pickleable. + """ + def __init__(self, return_value): + self.return_value = return_value + + def __call__(self): + return self.return_value + + +@contextlib.contextmanager +def use_platform(new_platform): + global platform, all_platforms + + msg = '"{0}" must be an instance of Platform' + assert isinstance(new_platform, Platform), msg.format(new_platform) + + original_platform_fn, original_all_platforms_fn = platform, all_platforms + platform = _PickleableCallable(new_platform) + all_platforms = _PickleableCallable([type(new_platform)]) + + # Clear configuration and compiler caches + spack.config.config.clear_caches() + spack.compilers._cache_config_files = [] + + yield new_platform + + platform, all_platforms = original_platform_fn, original_all_platforms_fn + + # Clear configuration and compiler caches + spack.config.config.clear_caches() + spack.compilers._cache_config_files = [] diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py index 66c87240a1..47e1e25fe8 100644 --- a/lib/spack/spack/test/architecture.py +++ b/lib/spack/spack/test/architecture.py @@ -6,6 +6,7 @@ """ Test checks if the architecture class is created correctly and also that the functions are looking for the correct architecture name """ +import itertools import os import platform as py_platform @@ -116,20 +117,26 @@ def test_user_defaults(config): assert default_target == default_spec.architecture.target -@pytest.mark.parametrize('operating_system', [ - x for x in spack.architecture.platform().operating_sys -] + ["fe", "be", "frontend", "backend"]) -@pytest.mark.parametrize('target', [ - x for x in spack.architecture.platform().targets -] + ["fe", "be", "frontend", "backend"]) -def test_user_input_combination(config, operating_system, target): - platform = spack.architecture.platform() - spec = Spec("libelf os=%s target=%s" % (operating_system, target)) - spec.concretize() - assert spec.architecture.os == str( - platform.operating_system(operating_system) - ) - assert spec.architecture.target == platform.target(target) +def test_user_input_combination(config): + valid_keywords = ["fe", "be", "frontend", "backend"] + + possible_targets = ([x for x in spack.architecture.platform().targets] + + valid_keywords) + + possible_os = ([x for x in spack.architecture.platform().operating_sys] + + valid_keywords) + + for target, operating_system in itertools.product( + possible_targets, possible_os + ): + platform = spack.architecture.platform() + spec_str = "libelf os={0} target={1}".format(operating_system, target) + spec = Spec(spec_str) + spec.concretize() + assert spec.architecture.os == str( + platform.operating_system(operating_system) + ) + assert spec.architecture.target == platform.target(target) def test_operating_system_conversion_to_dict(): diff --git a/lib/spack/spack/test/cmd/extensions.py b/lib/spack/spack/test/cmd/extensions.py index 7fc56593eb..ad993244ae 100644 --- a/lib/spack/spack/test/cmd/extensions.py +++ b/lib/spack/spack/test/cmd/extensions.py @@ -27,7 +27,7 @@ def python_database(mock_packages, mutable_database): @pytest.mark.db -def test_extensions(mock_packages, python_database, capsys): +def test_extensions(mock_packages, python_database, config, 
capsys): ext2 = Spec("py-extension2").concretized() def check_output(ni, na): diff --git a/lib/spack/spack/test/cmd/undevelop.py b/lib/spack/spack/test/cmd/undevelop.py index d44330fe76..493bb99228 100644 --- a/lib/spack/spack/test/cmd/undevelop.py +++ b/lib/spack/spack/test/cmd/undevelop.py @@ -12,7 +12,7 @@ env = SpackCommand('env') concretize = SpackCommand('concretize') -def test_undevelop(tmpdir, mock_packages, mutable_mock_env_path): +def test_undevelop(tmpdir, config, mock_packages, mutable_mock_env_path): # setup environment envdir = tmpdir.mkdir('env') with envdir.as_cwd(): @@ -39,7 +39,7 @@ env: assert not after.satisfies('dev_path=*') -def test_undevelop_nonexistent(tmpdir, mock_packages, mutable_mock_env_path): +def test_undevelop_nonexistent(tmpdir, config, mock_packages, mutable_mock_env_path): # setup environment envdir = tmpdir.mkdir('env') with envdir.as_cwd(): diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 18a7924c9e..297ae0d4f2 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -1,4 +1,4 @@ -# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -315,24 +315,18 @@ def _skip_if_missing_executables(request): pytest.skip(msg.format(', '.join(missing_execs))) -# FIXME: The lines below should better be added to a fixture with -# FIXME: session-scope. Anyhow doing it is not easy, as it seems -# FIXME: there's some weird interaction with compilers during concretization. -spack.architecture.real_platform = spack.architecture.platform - - +@pytest.fixture(scope='session') def test_platform(): return spack.platforms.test.Test() -spack.architecture.platform = test_platform - - -# FIXME: Since we change the architecture above, we have to (re)initialize -# FIXME: the config singleton. If it gets initialized too early with the -# FIXME: actual architecture, tests will fail. -spack.config.config = spack.config._config() - +@pytest.fixture(autouse=True, scope='session') +def _use_test_platform(test_platform): + # This is the only context manager used at session scope (see note + # below for more insight) since we want to use the test platform as + # a default during tests. + with spack.architecture.use_platform(test_platform): + yield # # Note on context managers used by fixtures @@ -356,6 +350,7 @@ spack.config.config = spack.config._config() # *USE*, or things can get really confusing. # + # # Test-specific fixtures # -- cgit v1.2.3-60-g2f50 From bd9929f9dc9649f84f710fb2c6540db1d0ea946a Mon Sep 17 00:00:00 2001 From: Andreas Baumbach Date: Fri, 26 Feb 2021 09:02:17 +0100 Subject: make `spack fetch` work with environments (#19166) * make `spack fetch` work with environments * previously: `spack fetch` required the explicit statement of the specs to be fetched, even when in an environment * now: if there is no spec(s) provided to `spack fetch` we check if an environment is active and if yes we fetch all uninstalled specs. 
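From the Python side, the resulting behavior can be sketched as follows (the environment name is hypothetical and assumed to be already concretized):

    import spack.environment as ev

    env = ev.read('demo')              # hypothetical, previously created environment
    everything = env.all_specs()       # what a plain `spack fetch` now iterates over
    missing = env.uninstalled_specs()  # what `spack fetch --missing` restricts itself to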
--- lib/spack/spack/cmd/fetch.py | 48 +++++++++++++++++++++++++++++++-------- lib/spack/spack/environment.py | 28 ++++++++++++++--------- lib/spack/spack/test/cmd/env.py | 13 +++++++++++ lib/spack/spack/test/cmd/fetch.py | 48 +++++++++++++++++++++++++++++++++++++++ 4 files changed, 117 insertions(+), 20 deletions(-) create mode 100644 lib/spack/spack/test/cmd/fetch.py diff --git a/lib/spack/spack/cmd/fetch.py b/lib/spack/spack/cmd/fetch.py index b91eb52ab8..d2cd53c69f 100644 --- a/lib/spack/spack/cmd/fetch.py +++ b/lib/spack/spack/cmd/fetch.py @@ -8,6 +8,7 @@ import llnl.util.tty as tty import spack.cmd import spack.cmd.common.arguments as arguments import spack.config +import spack.environment as ev import spack.repo description = "fetch archives for packages" @@ -18,22 +19,51 @@ level = "long" def setup_parser(subparser): arguments.add_common_arguments(subparser, ['no_checksum']) subparser.add_argument( - '-m', '--missing', action='store_true', - help="fetch only missing (not yet installed) dependencies") + "-m", + "--missing", + action="store_true", + help="fetch only missing (not yet installed) dependencies", + ) subparser.add_argument( - '-D', '--dependencies', action='store_true', - help="also fetch all dependencies") - arguments.add_common_arguments(subparser, ['specs']) + "-D", + "--dependencies", + action="store_true", + help="also fetch all dependencies", + ) + arguments.add_common_arguments(subparser, ["specs"]) + subparser.epilog = ( + "With an active environment, the specs " + "parameter can be omitted. In this case all (uninstalled" + ", in case of --missing) specs from the environment are fetched" + ) def fetch(parser, args): - if not args.specs: - tty.die("fetch requires at least one package argument") + if args.specs: + specs = spack.cmd.parse_specs(args.specs, concretize=True) + else: + # No specs were given explicitly, check if we are in an + # environment. If yes, check the missing argument, if yes + # fetch all uninstalled specs from it otherwise fetch all. + # If we are also not in an environment, complain to the + # user that we don't know what to do. + env = ev.get_env(args, "fetch") + if env: + if args.missing: + specs = env.uninstalled_specs() + else: + specs = env.all_specs() + if specs == []: + tty.die( + "No uninstalled specs in environment. Did you " + "run `spack concretize` yet?" + ) + else: + tty.die("fetch requires at least one spec argument") if args.no_checksum: - spack.config.set('config:checksum', False, scope='command_line') + spack.config.set("config:checksum", False, scope="command_line") - specs = spack.cmd.parse_specs(args.specs, concretize=True) for spec in specs: if args.missing or args.dependencies: for s in spec.traverse(): diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index 38c2c4cdb7..868ebb6ac5 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -1387,6 +1387,21 @@ class Environment(object): os.remove(build_log_link) os.symlink(spec.package.build_log_path, build_log_link) + def uninstalled_specs(self): + """Return a list of all uninstalled (and non-dev) specs.""" + # Do the installed check across all specs within a single + # DB read transaction to reduce time spent in lock acquisition. 
+ uninstalled_specs = [] + with spack.store.db.read_transaction(): + for concretized_hash in self.concretized_order: + spec = self.specs_by_hash[concretized_hash] + if not spec.package.installed or ( + spec.satisfies('dev_path=*') or + spec.satisfies('^dev_path=*') + ): + uninstalled_specs.append(spec) + return uninstalled_specs + def install_all(self, args=None, **install_args): """Install all concretized specs in an environment. @@ -1397,22 +1412,13 @@ class Environment(object): args (Namespace): argparse namespace with command arguments install_args (dict): keyword install arguments """ + tty.debug('Assessing installation status of environment packages') # If "spack install" is invoked repeatedly for a large environment # where all specs are already installed, the operation can take # a large amount of time due to repeatedly acquiring and releasing # locks, this does an initial check across all specs within a single # DB read transaction to reduce time spent in this case. - tty.debug('Assessing installation status of environment packages') - specs_to_install = [] - with spack.store.db.read_transaction(): - for concretized_hash in self.concretized_order: - spec = self.specs_by_hash[concretized_hash] - if not spec.package.installed or ( - spec.satisfies('dev_path=*') or - spec.satisfies('^dev_path=*') - ): - # If it's a dev build it could need to be reinstalled - specs_to_install.append(spec) + specs_to_install = self.uninstalled_specs() if not specs_to_install: tty.msg('All of the packages are already installed') diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 72f1b86347..44b5caf5f9 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -139,6 +139,19 @@ def test_concretize(): assert any(x.name == 'mpileaks' for x in env_specs) +def test_env_uninstalled_specs(install_mockery, mock_fetch): + e = ev.create('test') + e.add('cmake-client') + e.concretize() + assert any(s.name == 'cmake-client' for s in e.uninstalled_specs()) + e.install_all() + assert not any(s.name == 'cmake-client' for s in e.uninstalled_specs()) + e.add('mpileaks') + e.concretize() + assert not any(s.name == 'cmake-client' for s in e.uninstalled_specs()) + assert any(s.name == 'mpileaks' for s in e.uninstalled_specs()) + + def test_env_install_all(install_mockery, mock_fetch): e = ev.create('test') e.add('cmake-client') diff --git a/lib/spack/spack/test/cmd/fetch.py b/lib/spack/spack/test/cmd/fetch.py new file mode 100644 index 0000000000..d2f17eeb1b --- /dev/null +++ b/lib/spack/spack/test/cmd/fetch.py @@ -0,0 +1,48 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import pytest + +import spack.environment as ev + +from spack.main import SpackCommand, SpackCommandError + + +# everything here uses the mock_env_path +pytestmark = pytest.mark.usefixtures( + "mutable_mock_env_path", "config", "mutable_mock_repo" +) + + +@pytest.mark.disable_clean_stage_check +def test_fetch_in_env( + tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery +): + SpackCommand("env")("create", "test") + with ev.read("test"): + SpackCommand("add")("python") + with pytest.raises(SpackCommandError): + SpackCommand("fetch")() + SpackCommand("concretize")() + SpackCommand("fetch")() + + +@pytest.mark.disable_clean_stage_check +def test_fetch_single_spec( + tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery +): + SpackCommand("fetch")("mpileaks") + + +@pytest.mark.disable_clean_stage_check +def test_fetch_multiple_specs( + tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery +): + SpackCommand("fetch")("mpileaks", "gcc@10.2.0", "python") + + +def test_fetch_no_argument(): + with pytest.raises(SpackCommandError): + SpackCommand("fetch")() -- cgit v1.2.3-60-g2f50 From 316c292685b99db9a3ae37c5c6f5955f72a9aeca Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 21 Dec 2020 15:08:25 -0800 Subject: clingo: prefer master branch Most people installing `clingo` with Spack are going to be doing it to use the new concretizer, and that requires the `master` branch. - [x] make `master` the default so we don't have to keep telling people to install `clingo@master`. We'll update the preferred version when there's a new release. --- var/spack/repos/builtin/packages/clingo/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/clingo/package.py b/var/spack/repos/builtin/packages/clingo/package.py index 856f0d6338..676bf7b99e 100644 --- a/var/spack/repos/builtin/packages/clingo/package.py +++ b/var/spack/repos/builtin/packages/clingo/package.py @@ -22,7 +22,7 @@ class Clingo(CMakePackage): maintainers = ["tgamblin"] - version('master', branch='master', submodules=True) + version('master', branch='master', submodules=True, preferred=True) version('spack', commit='2ab2e81bcb24f6070b7efce30a754d74ef52ee2d', submodules=True) version('5.4.0', sha256='e2de331ee0a6d254193aab5995338a621372517adcf91568092be8ac511c18f3') -- cgit v1.2.3-60-g2f50 From add339cbfe357c976a912485ff075336aa575ceb Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Thu, 28 Jan 2021 02:38:01 -0600 Subject: Clingo: fix missing import (#21364) --- var/spack/repos/builtin/packages/clingo/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/clingo/package.py b/var/spack/repos/builtin/packages/clingo/package.py index 676bf7b99e..db59681d0a 100644 --- a/var/spack/repos/builtin/packages/clingo/package.py +++ b/var/spack/repos/builtin/packages/clingo/package.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * +from spack.compiler import UnsupportedCompilerFlag class Clingo(CMakePackage): -- cgit v1.2.3-60-g2f50 From 0b5c5b8068be0a973d1ed37cb011bb0cbba3e763 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 29 Jan 2021 21:22:57 +0100 Subject: clingo: added a package with option for bootstrapping clingo (#20652) * clingo/clingo-bootstrap: added a package with option for bootstrapping clingo package builds in Release mode uses GCC options to link libstdc++ and libgcc statically * clingo-bootstrap: apple-clang options to bootstrap statically on darwin * clingo: fix the path of the Python interpreter In case multiple Python versions are in the same prefix (e.g. when clingo is built against an external Python), it may happen that the Python used by CMake does not match the corresponding node in the current spec. This is fixed here by defining "Python_EXECUTABLE" properly as a hint to CMake. * clingo: the commit for "spack" version has been updated. --- .../builtin/packages/clingo-bootstrap/package.py | 51 ++++++++++++++++++++++ var/spack/repos/builtin/packages/clingo/package.py | 17 +++++++- 2 files changed, 66 insertions(+), 2 deletions(-) create mode 100644 var/spack/repos/builtin/packages/clingo-bootstrap/package.py diff --git a/var/spack/repos/builtin/packages/clingo-bootstrap/package.py b/var/spack/repos/builtin/packages/clingo-bootstrap/package.py new file mode 100644 index 0000000000..06858da285 --- /dev/null +++ b/var/spack/repos/builtin/packages/clingo-bootstrap/package.py @@ -0,0 +1,51 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.pkg.builtin.clingo import Clingo + +import spack.compilers + + +class ClingoBootstrap(Clingo): + """Clingo with some options used for bootstrapping""" + variant('build_type', default='Release', values=('Release',), + description='CMake build type') + + # CMake at version 3.16.0 or higher has the possibility to force the + # Python interpreter, which is crucial to build against external Python + # in environment where more than one interpreter is in the same prefix + depends_on('cmake@3.16.0:', type='build') + + # On Linux we bootstrap with GCC + for compiler_spec in [ + c for c in spack.compilers.supported_compilers() + if c != 'gcc' + ]: + conflicts('%{0}'.format(compiler_spec), when='platform=linux', + msg='GCC is required to bootstrap clingo on Linux') + conflicts('%gcc@:5.99.99', when='platform=linux', + msg='C++14 support is required to bootstrap clingo on Linux') + + # On Darwin we bootstrap with Apple Clang + for compiler_spec in [ + c for c in spack.compilers.supported_compilers() + if c != 'apple-clang' + ]: + conflicts('%{0}'.format(compiler_spec), when='platform=darwin', + msg='Apple-clang is required to bootstrap clingo on MacOS') + + # Clingo needs the Python module to be usable by Spack + conflicts('~python', msg='Python support is required to bootstrap Spack') + + def setup_build_environment(self, env): + if '%apple-clang platform=darwin' in self.spec: + opts = '-mmacosx-version-min=10.13' + elif '%gcc platform=linux' in self.spec: + opts = '-static-libstdc++ -static-libgcc -Wl,--exclude-libs,ALL' + else: + msg = 'unexpected compiler for spec "{0}"'.format(self.spec) + raise RuntimeError(msg) + + env.set('CXXFLAGS', opts) + env.set('LDFLAGS', opts) diff --git a/var/spack/repos/builtin/packages/clingo/package.py b/var/spack/repos/builtin/packages/clingo/package.py index db59681d0a..c15a075731 100644 --- a/var/spack/repos/builtin/packages/clingo/package.py +++ b/var/spack/repos/builtin/packages/clingo/package.py @@ -24,7 +24,7 @@ class Clingo(CMakePackage): maintainers = ["tgamblin"] version('master', branch='master', submodules=True, preferred=True) - version('spack', commit='2ab2e81bcb24f6070b7efce30a754d74ef52ee2d', submodules=True) + version('spack', commit='2a025667090d71b2c9dce60fe924feb6bde8f667', submodules=True) version('5.4.0', sha256='e2de331ee0a6d254193aab5995338a621372517adcf91568092be8ac511c18f3') version('5.3.0', sha256='b0d406d2809352caef7fccf69e8864d55e81ee84f4888b0744894977f703f976') @@ -51,13 +51,22 @@ class Clingo(CMakePackage): 'clasp/CMakeLists.txt', 'clasp/libpotassco/CMakeLists.txt') + @property + def cmake_python_hints(self): + """Return standard CMake defines to ensure that the + current spec is the one found by CMake find_package(Python, ...) 
+ """ + return [ + '-DPython_EXECUTABLE={0}'.format(str(self.spec['python'].command)) + ] + def cmake_args(self): try: self.compiler.cxx14_flag except UnsupportedCompilerFlag: InstallError('clingo requires a C++14-compliant C++ compiler') - return [ + args = [ '-DCLINGO_REQUIRE_PYTHON=ON', '-DCLINGO_BUILD_WITH_PYTHON=ON', '-DCLINGO_BUILD_PY_SHARED=ON', @@ -65,3 +74,7 @@ class Clingo(CMakePackage): '-DPYCLINGO_USE_INSTALL_PREFIX=ON', '-DCLINGO_BUILD_WITH_LUA=OFF' ] + if self.spec['cmake'].satisfies('@3.16.0:'): + args += self.cmake_python_hints + + return args -- cgit v1.2.3-60-g2f50 From 68ef6fce92766601a34fa5bc221f0e9e0dba09d5 Mon Sep 17 00:00:00 2001 From: Maxim Belkin Date: Sun, 21 Mar 2021 18:52:46 -0500 Subject: clingo: fix typo (#22444) --- var/spack/repos/builtin/packages/clingo/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/clingo/package.py b/var/spack/repos/builtin/packages/clingo/package.py index c15a075731..7f43f4708b 100644 --- a/var/spack/repos/builtin/packages/clingo/package.py +++ b/var/spack/repos/builtin/packages/clingo/package.py @@ -30,7 +30,7 @@ class Clingo(CMakePackage): version('5.3.0', sha256='b0d406d2809352caef7fccf69e8864d55e81ee84f4888b0744894977f703f976') version('5.2.2', sha256='da1ef8142e75c5a6f23c9403b90d4f40b9f862969ba71e2aaee9a257d058bfcf') - variant("docs", default=False, description="build documentation with Doxyegen") + variant("docs", default=False, description="build documentation with Doxygen") variant("python", default=True, description="build with python bindings") depends_on('doxygen', type="build", when="+docs") -- cgit v1.2.3-60-g2f50 From 6c1b348d912175ce49141386cee8d79d8035269b Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 22 Mar 2021 23:57:32 +0100 Subject: clingo-bootstrap: account for cray platform (#22460) (cherry picked from commit 138312efabd534fa42d1a16e172e859f0d2b5842) --- var/spack/repos/builtin/packages/clingo-bootstrap/package.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/clingo-bootstrap/package.py b/var/spack/repos/builtin/packages/clingo-bootstrap/package.py index 06858da285..014ba12927 100644 --- a/var/spack/repos/builtin/packages/clingo-bootstrap/package.py +++ b/var/spack/repos/builtin/packages/clingo-bootstrap/package.py @@ -24,8 +24,11 @@ class ClingoBootstrap(Clingo): ]: conflicts('%{0}'.format(compiler_spec), when='platform=linux', msg='GCC is required to bootstrap clingo on Linux') - conflicts('%gcc@:5.99.99', when='platform=linux', - msg='C++14 support is required to bootstrap clingo on Linux') + conflicts('%{0}'.format(compiler_spec), when='platform=cray', + msg='GCC is required to bootstrap clingo on Cray') + conflicts( + '%gcc@:5.99.99', msg='C++14 support is required to bootstrap clingo' + ) # On Darwin we bootstrap with Apple Clang for compiler_spec in [ @@ -41,7 +44,8 @@ class ClingoBootstrap(Clingo): def setup_build_environment(self, env): if '%apple-clang platform=darwin' in self.spec: opts = '-mmacosx-version-min=10.13' - elif '%gcc platform=linux' in self.spec: + elif '%gcc' in self.spec: + # This is either linux or cray opts = '-static-libstdc++ -static-libgcc -Wl,--exclude-libs,ALL' else: msg = 'unexpected compiler for spec "{0}"'.format(self.spec) -- cgit v1.2.3-60-g2f50 From 16f7a02654095801dfd67cd647136c6466a9f73c Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 3 Mar 2021 18:37:46 +0100 Subject: Bootstrap clingo from sources (#21446) * Allow the 
bootstrapping of clingo from sources Allow python builds with system python as external for MacOS * Ensure consistent configuration when bootstrapping clingo This commit uses context managers to ensure we can bootstrap clingo using a consistent configuration regardless of the use case being managed. * Github actions: test clingo with bootstrapping from sources * Add command to inspect and clean the bootstrap store Prevent users to set the install tree root to the bootstrap store * clingo: documented how to bootstrap from sources Co-authored-by: Gregory Becker (cherry picked from commit 10e9e142b75c6ca8bc61f688260c002201cc1b22) --- .github/workflows/linux_unit_tests.yaml | 33 ++---- lib/spack/docs/getting_started.rst | 47 ++++++++ lib/spack/spack/bootstrap.py | 188 ++++++++++++++++++++++++++++++++ lib/spack/spack/build_systems/python.py | 23 +++- lib/spack/spack/cmd/clean.py | 19 +++- lib/spack/spack/cmd/find.py | 13 ++- lib/spack/spack/paths.py | 3 +- lib/spack/spack/solver/asp.py | 20 +++- lib/spack/spack/store.py | 10 ++ lib/spack/spack/util/environment.py | 9 +- lib/spack/spack/util/executable.py | 38 +++++-- share/spack/spack-completion.bash | 6 +- 12 files changed, 363 insertions(+), 46 deletions(-) create mode 100644 lib/spack/spack/bootstrap.py diff --git a/.github/workflows/linux_unit_tests.yaml b/.github/workflows/linux_unit_tests.yaml index 5df0957659..f21f9cd0ab 100644 --- a/.github/workflows/linux_unit_tests.yaml +++ b/.github/workflows/linux_unit_tests.yaml @@ -15,6 +15,7 @@ jobs: strategy: matrix: python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9] + concretizer: ['original', 'clingo'] steps: - uses: actions/checkout@v2 @@ -50,16 +51,23 @@ jobs: mkdir -p ${KCOV_ROOT}/build cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd - make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install + - name: Bootstrap clingo from sources + if: ${{ matrix.concretizer == 'clingo' }} + run: | + . share/spack/setup-env.sh + spack external find --not-buildable cmake bison + spack -v solve zlib - name: Run unit tests env: COVERAGE: true + SPACK_TEST_SOLVER: ${{ matrix.concretizer }} run: | share/spack/qa/run-unit-tests coverage combine coverage xml - uses: codecov/codecov-action@v1 with: - flags: unittests,linux + flags: unittests,linux,${{ matrix.concretizer }} shell: runs-on: ubuntu-latest steps: @@ -103,6 +111,7 @@ jobs: - uses: codecov/codecov-action@v1 with: flags: shelltests,linux + centos6: # Test for Python2.6 run on Centos 6 runs-on: ubuntu-latest @@ -117,27 +126,7 @@ jobs: git fetch origin ${{ github.ref }}:test-branch git checkout test-branch share/spack/qa/run-unit-tests - clingo: - # Test for the clingo based solver - runs-on: ubuntu-latest - container: spack/github-actions:clingo - steps: - - name: Run unit tests - run: | - whoami && echo PWD=$PWD && echo HOME=$HOME && echo SPACK_TEST_SOLVER=$SPACK_TEST_SOLVER - which clingo && clingo --version - git clone https://github.com/spack/spack.git && cd spack - git fetch origin ${{ github.ref }}:test-branch - git checkout test-branch - . 
share/spack/setup-env.sh - spack compiler find - spack solve mpileaks%gcc - coverage run $(which spack) unit-test -v - coverage combine - coverage xml - - uses: codecov/codecov-action@v1 - with: - flags: unittests,linux,clingo + clingo-cffi: # Test for the clingo based solver (using clingo-cffi) runs-on: ubuntu-latest diff --git a/lib/spack/docs/getting_started.rst b/lib/spack/docs/getting_started.rst index f1df0343bd..09cc2c488a 100644 --- a/lib/spack/docs/getting_started.rst +++ b/lib/spack/docs/getting_started.rst @@ -103,6 +103,53 @@ environment*, especially for ``PATH``. Only software that comes with the system, or that you know you wish to use with Spack, should be included. This procedure will avoid many strange build errors. +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Optional: Bootstrapping clingo +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Spack supports using clingo as an external solver to compute which software +needs to be installed. If you have a default compiler supporting C++14 Spack +can automatically bootstrap this tool from sources the first time it is +needed: + +.. code-block:: console + + $ spack solve zlib + [+] /usr (external bison-3.0.4-wu5pgjchxzemk5ya2l3ddqug2d7jv6eb) + [+] /usr (external cmake-3.19.4-a4kmcfzxxy45mzku4ipmj5kdiiz5a57b) + [+] /usr (external python-3.6.9-x4fou4iqqlh5ydwddx3pvfcwznfrqztv) + ==> Installing re2c-1.2.1-e3x6nxtk3ahgd63ykgy44mpuva6jhtdt + [ ... ] + ==> Optimization: [0, 0, 0, 0, 0, 1, 0, 0, 0] + zlib@1.2.11%gcc@10.1.0+optimize+pic+shared arch=linux-ubuntu18.04-broadwell + +If you want to speed-up bootstrapping, you may try to search for ``cmake`` and ``bison`` +on your system: + +.. code-block:: console + + $ spack external find cmake bison + ==> The following specs have been detected on this system and added to /home/spack/.spack/packages.yaml + bison@3.0.4 cmake@3.19.4 + +All the tools Spack needs for its own functioning are installed in a separate store, which lives +under the ``${HOME}/.spack`` directory. The software installed there can be queried with: + +.. code-block:: console + + $ spack find --bootstrap + ==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store" + ==> 3 installed packages + -- linux-ubuntu18.04-x86_64 / gcc@10.1.0 ------------------------ + clingo-bootstrap@spack python@3.6.9 re2c@1.2.1 + +In case it's needed the bootstrap store can also be cleaned with: + +.. code-block:: console + + $ spack clean -b + ==> Removing software in "/home/spack/.spack/bootstrap/store" + ^^^^^^^^^^^^^^^^^^^^^^^^^^ Optional: Alternate Prefix ^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/lib/spack/spack/bootstrap.py b/lib/spack/spack/bootstrap.py new file mode 100644 index 0000000000..93e79fbe71 --- /dev/null +++ b/lib/spack/spack/bootstrap.py @@ -0,0 +1,188 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import contextlib +import os +import sys + +import llnl.util.filesystem as fs +import llnl.util.tty as tty + +import spack.architecture +import spack.config +import spack.paths +import spack.repo +import spack.spec +import spack.store +import spack.user_environment as uenv +import spack.util.executable +from spack.util.environment import EnvironmentModifications + + +@contextlib.contextmanager +def spack_python_interpreter(): + """Override the current configuration to set the interpreter under + which Spack is currently running as the only Python external spec + available. 
+ """ + python_cls = type(spack.spec.Spec('python').package) + python_prefix = os.path.dirname(os.path.dirname(sys.executable)) + externals = python_cls.determine_spec_details( + python_prefix, [os.path.basename(sys.executable)]) + external_python = externals[0] + + entry = { + 'buildable': False, + 'externals': [ + {'prefix': python_prefix, 'spec': str(external_python)} + ] + } + + with spack.config.override('packages:python::', entry): + yield + + +def make_module_available(module, spec=None, install=False): + """Ensure module is importable""" + # If we already can import it, that's great + try: + __import__(module) + return + except ImportError: + pass + + # If it's already installed, use it + # Search by spec + spec = spack.spec.Spec(spec or module) + + # We have to run as part of this python + # We can constrain by a shortened version in place of a version range + # because this spec is only used for querying or as a placeholder to be + # replaced by an external that already has a concrete version. This syntax + # is not suffucient when concretizing without an external, as it will + # concretize to python@X.Y instead of python@X.Y.Z + spec.constrain('^python@%d.%d' % sys.version_info[:2]) + installed_specs = spack.store.db.query(spec, installed=True) + + for ispec in installed_specs: + # TODO: make sure run-environment is appropriate + module_path = os.path.join(ispec.prefix, + ispec['python'].package.site_packages_dir) + module_path_64 = module_path.replace('/lib/', '/lib64/') + try: + sys.path.append(module_path) + sys.path.append(module_path_64) + __import__(module) + return + except ImportError: + tty.warn("Spec %s did not provide module %s" % (ispec, module)) + sys.path = sys.path[:-2] + + def _raise_error(module_name, module_spec): + error_msg = 'cannot import module "{0}"'.format(module_name) + if module_spec: + error_msg += ' from spec "{0}'.format(module_spec) + raise ImportError(error_msg) + + if not install: + _raise_error(module, spec) + + with spack_python_interpreter(): + # We will install for ourselves, using this python if needed + # Concretize the spec + spec.concretize() + spec.package.do_install() + + module_path = os.path.join(spec.prefix, + spec['python'].package.site_packages_dir) + module_path_64 = module_path.replace('/lib/', '/lib64/') + try: + sys.path.append(module_path) + sys.path.append(module_path_64) + __import__(module) + return + except ImportError: + sys.path = sys.path[:-2] + _raise_error(module, spec) + + +def get_executable(exe, spec=None, install=False): + """Find an executable named exe, either in PATH or in Spack + + Args: + exe (str): needed executable name + spec (Spec or str): spec to search for exe in (default exe) + install (bool): install spec if not available + + When ``install`` is True, Spack will use the python used to run Spack as an + external. 
The ``install`` option should only be used with packages that + install quickly (when using external python) or are guaranteed by Spack + organization to be in a binary mirror (clingo).""" + # Search the system first + runner = spack.util.executable.which(exe) + if runner: + return runner + + # Check whether it's already installed + spec = spack.spec.Spec(spec or exe) + installed_specs = spack.store.db.query(spec, installed=True) + for ispec in installed_specs: + # filter out directories of the same name as the executable + exe_path = [exe_p for exe_p in fs.find(ispec.prefix, exe) + if fs.is_exe(exe_p)] + if exe_path: + ret = spack.util.executable.Executable(exe_path[0]) + envmod = EnvironmentModifications() + for dep in ispec.traverse(root=True, order='post'): + envmod.extend(uenv.environment_modifications_for_spec(dep)) + ret.add_default_envmod(envmod) + return ret + else: + tty.warn('Exe %s not found in prefix %s' % (exe, ispec.prefix)) + + def _raise_error(executable, exe_spec): + error_msg = 'cannot find the executable "{0}"'.format(executable) + if exe_spec: + error_msg += ' from spec "{0}'.format(exe_spec) + raise RuntimeError(error_msg) + + # If we're not allowed to install this for ourselves, we can't find it + if not install: + _raise_error(exe, spec) + + with spack_python_interpreter(): + # We will install for ourselves, using this python if needed + # Concretize the spec + spec.concretize() + + spec.package.do_install() + # filter out directories of the same name as the executable + exe_path = [exe_p for exe_p in fs.find(spec.prefix, exe) + if fs.is_exe(exe_p)] + if exe_path: + ret = spack.util.executable.Executable(exe_path[0]) + envmod = EnvironmentModifications() + for dep in spec.traverse(root=True, order='post'): + envmod.extend(uenv.environment_modifications_for_spec(dep)) + ret.add_default_envmod(envmod) + return ret + + _raise_error(exe, spec) + + +@contextlib.contextmanager +def ensure_bootstrap_configuration(): + # Default configuration scopes excluding command + # line and builtin + config_scopes = [ + spack.config.ConfigScope(name, path) + for name, path in spack.config.configuration_paths + ] + + with spack.architecture.use_platform(spack.architecture.real_platform()): + with spack.config.use_configuration(*config_scopes): + with spack.repo.use_repositories(spack.paths.packages_path): + with spack.store.use_store(spack.paths.user_bootstrap_store): + with spack_python_interpreter(): + yield diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py index 76159d88a1..2616642732 100644 --- a/lib/spack/spack/build_systems/python.py +++ b/lib/spack/spack/build_systems/python.py @@ -233,7 +233,28 @@ class PythonPackage(PackageBase): if ('py-setuptools' == spec.name or # this is setuptools, or 'py-setuptools' in spec._dependencies and # it's an immediate dep 'build' in spec._dependencies['py-setuptools'].deptypes): - args += ['--single-version-externally-managed', '--root=/'] + args += ['--single-version-externally-managed'] + + # Get all relative paths since we set the root to `prefix` + # We query the python with which these will be used for the lib and inc + # directories. This ensures we use `lib`/`lib64` as expected by python. + python = spec['python'].package.command + command_start = 'print(distutils.sysconfig.' 
+ commands = ';'.join([ + 'import distutils.sysconfig', + command_start + 'get_python_lib(plat_specific=False, prefix=""))', + command_start + 'get_python_lib(plat_specific=True, prefix=""))', + command_start + 'get_python_inc(plat_specific=True, prefix=""))']) + pure_site_packages_dir, plat_site_packages_dir, inc_dir = python( + '-c', commands, output=str, error=str).strip().split('\n') + + args += ['--root=%s' % prefix, + '--install-purelib=%s' % pure_site_packages_dir, + '--install-platlib=%s' % plat_site_packages_dir, + '--install-scripts=bin', + '--install-data=""', + '--install-headers=%s' % inc_dir + ] return args diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py index f69b959293..2e1ec1255f 100644 --- a/lib/spack/spack/cmd/clean.py +++ b/lib/spack/spack/cmd/clean.py @@ -10,11 +10,12 @@ import shutil import llnl.util.tty as tty import spack.caches +import spack.config import spack.cmd.test import spack.cmd.common.arguments as arguments +import spack.main import spack.repo import spack.stage -import spack.config from spack.paths import lib_path, var_path @@ -26,7 +27,7 @@ level = "long" class AllClean(argparse.Action): """Activates flags -s -d -f -m and -p simultaneously""" def __call__(self, parser, namespace, values, option_string=None): - parser.parse_args(['-sdfmp'], namespace=namespace) + parser.parse_args(['-sdfmpb'], namespace=namespace) def setup_parser(subparser): @@ -46,7 +47,10 @@ def setup_parser(subparser): '-p', '--python-cache', action='store_true', help="remove .pyc, .pyo files and __pycache__ folders") subparser.add_argument( - '-a', '--all', action=AllClean, help="equivalent to -sdfmp", nargs=0 + '-b', '--bootstrap', action='store_true', + help="remove software needed to bootstrap Spack") + subparser.add_argument( + '-a', '--all', action=AllClean, help="equivalent to -sdfmpb", nargs=0 ) arguments.add_common_arguments(subparser, ['specs']) @@ -54,7 +58,7 @@ def setup_parser(subparser): def clean(parser, args): # If nothing was set, activate the default if not any([args.specs, args.stage, args.downloads, args.failures, - args.misc_cache, args.python_cache]): + args.misc_cache, args.python_cache, args.bootstrap]): args.stage = True # Then do the cleaning falling through the cases @@ -96,3 +100,10 @@ def clean(parser, args): dname = os.path.join(root, d) tty.debug('Removing {0}'.format(dname)) shutil.rmtree(dname) + + if args.bootstrap: + msg = 'Removing software in "{0}"' + tty.msg(msg.format(spack.paths.user_bootstrap_store)) + with spack.store.use_store(spack.paths.user_bootstrap_store): + uninstall = spack.main.SpackCommand('uninstall') + uninstall('-a', '-y') diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index d9d51b853c..f770a02b25 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -109,6 +109,10 @@ def setup_parser(subparser): subparser.add_argument( '--end-date', help='latest date of installation [YYYY-MM-DD]' ) + subparser.add_argument( + '-b', '--bootstrap', action='store_true', + help='show software in the internal bootstrap store' + ) arguments.add_common_arguments(subparser, ['constraint']) @@ -201,7 +205,14 @@ def display_env(env, args, decorator): def find(parser, args): q_args = query_arguments(args) - results = args.specs(**q_args) + # Query the current store or the internal bootstrap store if required + if args.bootstrap: + msg = 'Showing internal bootstrap store at "{0}"' + tty.msg(msg.format(spack.paths.user_bootstrap_store)) + with 
spack.store.use_store(spack.paths.user_bootstrap_store): + results = args.specs(**q_args) + else: + results = args.specs(**q_args) decorator = lambda s, f: f added = set() diff --git a/lib/spack/spack/paths.py b/lib/spack/spack/paths.py index 9c803cba7e..068a7a6fdb 100644 --- a/lib/spack/spack/paths.py +++ b/lib/spack/spack/paths.py @@ -50,7 +50,8 @@ mock_packages_path = os.path.join(repos_path, "builtin.mock") #: User configuration location user_config_path = os.path.expanduser('~/.spack') - +user_bootstrap_path = os.path.join(user_config_path, 'bootstrap') +user_bootstrap_store = os.path.join(user_bootstrap_path, 'store') opt_path = os.path.join(prefix, "opt") etc_path = os.path.join(prefix, "etc") diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index ac2983cecc..8d2b2b8aa5 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1,4 +1,4 @@ -# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -22,6 +22,7 @@ try: clingo_cffi = hasattr(clingo.Symbol, '_rep') except ImportError: clingo = None + clingo_cffi = False import llnl.util.lang import llnl.util.tty as tty @@ -38,6 +39,7 @@ import spack.spec import spack.package import spack.package_prefs import spack.repo +import spack.bootstrap import spack.variant import spack.version @@ -246,7 +248,20 @@ class PyclingoDriver(object): asp (file-like): optional stream to write a text-based ASP program for debugging or verification. """ - assert clingo, "PyclingoDriver requires clingo with Python support" + global clingo + if not clingo: + # TODO: Find a way to vendor the concrete spec + # in a cross-platform way + with spack.bootstrap.ensure_bootstrap_configuration(): + generic_target = archspec.cpu.host().family + spec_str = 'clingo-bootstrap@spack+python target={0}'.format( + str(generic_target) + ) + clingo_spec = spack.spec.Spec(spec_str) + clingo_spec._old_concretize() + spack.bootstrap.make_module_available( + 'clingo', spec=clingo_spec, install=True) + import clingo self.out = asp or llnl.util.lang.Devnull() self.cores = cores @@ -1112,7 +1127,6 @@ class SpackSolverSetup(object): if not spec.architecture or not spec.architecture.target: continue - print("TTYPE:", type(platform.target(spec.target.name))) target = archspec.cpu.TARGETS.get(spec.target.name) if not target: self.target_ranges(spec, None) diff --git a/lib/spack/spack/store.py b/lib/spack/spack/store.py index 6040145b64..7b77768a37 100644 --- a/lib/spack/spack/store.py +++ b/lib/spack/spack/store.py @@ -173,6 +173,16 @@ def _store(): config_dict = spack.config.get('config') root, unpadded_root, projections = parse_install_tree(config_dict) hash_length = spack.config.get('config:install_hash_length') + + # Check that the user is not trying to install software into the store + # reserved by Spack to bootstrap its own dependencies, since this would + # lead to bizarre behaviors (e.g. 
cleaning the bootstrap area would wipe + # user installed software) + if spack.paths.user_bootstrap_store == root: + msg = ('please change the install tree root "{0}" in your ' + 'configuration [path reserved for Spack internal use]') + raise ValueError(msg.format(root)) + return Store(root=root, unpadded_root=unpadded_root, projections=projections, diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py index b370053c3c..53e0da32d7 100644 --- a/lib/spack/spack/util/environment.py +++ b/lib/spack/spack/util/environment.py @@ -528,13 +528,18 @@ class EnvironmentModifications(object): return rev - def apply_modifications(self): + def apply_modifications(self, env=None): """Applies the modifications and clears the list.""" + # Use os.environ if not specified + # Do not copy, we want to modify it in place + if env is None: + env = os.environ + modifications = self.group_by_name() # Apply modifications one variable at a time for name, actions in sorted(modifications.items()): for x in actions: - x.execute(os.environ) + x.execute(env) def shell_modifications(self, shell='sh'): """Return shell code to apply the modifications and clears the list.""" diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py index 614bc1725a..fb192d6885 100644 --- a/lib/spack/spack/util/executable.py +++ b/lib/spack/spack/util/executable.py @@ -22,6 +22,8 @@ class Executable(object): def __init__(self, name): self.exe = shlex.split(str(name)) self.default_env = {} + from spack.util.environment import EnvironmentModifications # no cycle + self.default_envmod = EnvironmentModifications() self.returncode = None if not self.exe: @@ -40,6 +42,10 @@ class Executable(object): """ self.default_env[key] = value + def add_default_envmod(self, envmod): + """Set an EnvironmentModifications to use when the command is run.""" + self.default_envmod.extend(envmod) + @property def command(self): """The command-line string. @@ -76,9 +82,10 @@ class Executable(object): Keyword Arguments: _dump_env (dict): Dict to be set to the environment actually used (envisaged for testing purposes only) - env (dict): The environment to run the executable with - extra_env (dict): Extra items to add to the environment - (neither requires nor precludes env) + env (dict or EnvironmentModifications): The environment with which + to run the executable + extra_env (dict or EnvironmentModifications): Extra items to add to + the environment (neither requires nor precludes env) fail_on_error (bool): Raise an exception if the subprocess returns an error. Default is True. 
The return code is available as ``exe.returncode`` @@ -107,13 +114,26 @@ class Executable(object): """ # Environment env_arg = kwargs.get('env', None) - if env_arg is None: - env = os.environ.copy() - env.update(self.default_env) - else: - env = self.default_env.copy() + + # Setup default environment + env = os.environ.copy() if env_arg is None else {} + self.default_envmod.apply_modifications(env) + env.update(self.default_env) + + from spack.util.environment import EnvironmentModifications # no cycle + # Apply env argument + if isinstance(env_arg, EnvironmentModifications): + env_arg.apply_modifications(env) + elif env_arg: env.update(env_arg) - env.update(kwargs.get('extra_env', {})) + + # Apply extra env + extra_env = kwargs.get('extra_env', {}) + if isinstance(extra_env, EnvironmentModifications): + extra_env.apply_modifications(env) + else: + env.update(extra_env) + if '_dump_env' in kwargs: kwargs['_dump_env'].clear() kwargs['_dump_env'].update(env) diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index bf44039ad9..46e92a0bd5 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -1,4 +1,4 @@ -# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -475,7 +475,7 @@ _spack_ci_rebuild() { _spack_clean() { if $list_options then - SPACK_COMPREPLY="-h --help -s --stage -d --downloads -f --failures -m --misc-cache -p --python-cache -a --all" + SPACK_COMPREPLY="-h --help -s --stage -d --downloads -f --failures -m --misc-cache -p --python-cache -b --bootstrap -a --all" else _all_packages fi @@ -891,7 +891,7 @@ _spack_fetch() { _spack_find() { if $list_options then - SPACK_COMPREPLY="-h --help --format --json -d --deps -p --paths --groups --no-groups -l --long -L --very-long -t --tags -c --show-concretized -f --show-flags --show-full-compiler -x --explicit -X --implicit -u --unknown -m --missing -v --variants --loaded -M --only-missing --deprecated --only-deprecated -N --namespace --start-date --end-date" + SPACK_COMPREPLY="-h --help --format --json -d --deps -p --paths --groups --no-groups -l --long -L --very-long -t --tags -c --show-concretized -f --show-flags --show-full-compiler -x --explicit -X --implicit -u --unknown -m --missing -v --variants --loaded -M --only-missing --deprecated --only-deprecated -N --namespace --start-date --end-date -b --bootstrap" else _installed_packages fi -- cgit v1.2.3-60-g2f50 From f1c7402c64c0e3b7049517ee2d8f8be7fda9679b Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 23 Mar 2021 20:29:13 +0100 Subject: bootstrap: account for platform specific configuration scopes (#22489) This change accounts for platform specific configuration scopes, like ~/.spack/linux, during bootstrapping. These scopes were previously not accounted for and that was causing issues e.g. when searching for compilers. 
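For orientation, the effect on the scope layout is roughly the following (a minimal sketch; the names and paths below are illustrative, while the real values come from `spack.config.configuration_paths` and `spack.architecture.platform()`):

```python
import os

# Hypothetical configuration roots; Spack reads the real list from
# spack.config.configuration_paths at runtime.
configuration_paths = [
    ("defaults", "/path/to/spack/etc/spack/defaults"),
    ("user", os.path.expanduser("~/.spack")),
]
platform = "linux"  # e.g. spack.architecture.platform().name

scopes = []
for name, path in configuration_paths:
    # generic scope, e.g. "user" -> ~/.spack
    scopes.append((name, path))
    # platform-specific scope, e.g. "user/linux" -> ~/.spack/linux
    scopes.append(("/".join([name, platform]), os.path.join(path, platform)))
```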
(cherry picked from commit 413c422e53843a9e33d7b77a8c44dcfd4bf701be) --- lib/spack/spack/bootstrap.py | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/lib/spack/spack/bootstrap.py b/lib/spack/spack/bootstrap.py index 93e79fbe71..52bd810e89 100644 --- a/lib/spack/spack/bootstrap.py +++ b/lib/spack/spack/bootstrap.py @@ -171,16 +171,27 @@ def get_executable(exe, spec=None, install=False): _raise_error(exe, spec) +def _bootstrap_config_scopes(): + config_scopes = [] + for name, path in spack.config.configuration_paths: + platform = spack.architecture.platform().name + platform_scope = spack.config.ConfigScope( + '/'.join([name, platform]), os.path.join(path, platform) + ) + generic_scope = spack.config.ConfigScope(name, path) + config_scopes.extend([generic_scope, platform_scope]) + msg = '[BOOSTRAP CONFIG SCOPE] name={0}, path={1}' + tty.debug(msg.format(generic_scope.name, generic_scope.path)) + tty.debug(msg.format(platform_scope.name, platform_scope.path)) + return config_scopes + + @contextlib.contextmanager def ensure_bootstrap_configuration(): - # Default configuration scopes excluding command - # line and builtin - config_scopes = [ - spack.config.ConfigScope(name, path) - for name, path in spack.config.configuration_paths - ] - with spack.architecture.use_platform(spack.architecture.real_platform()): + # Default configuration scopes excluding command line and builtin + # but accounting for platform specific scopes + config_scopes = _bootstrap_config_scopes() with spack.config.use_configuration(*config_scopes): with spack.repo.use_repositories(spack.paths.packages_path): with spack.store.use_store(spack.paths.user_bootstrap_store): -- cgit v1.2.3-60-g2f50 From a823cffc40dda0738a9b8848c424efaa8a8ece2e Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 2 Jan 2021 20:28:46 -0800 Subject: concretizer: unify logic for spec conditionals This builds on #20638 by unifying all the places in the concretizer where things are conditional on specs. Previously, we duplicated a common spec conditional pattern for dependencies, virtual providers, conflicts, and externals. That was introduced in #20423 and refined in #20507, and roughly looked as follows. Given some directives in a package like: ```python depends_on("foo@1.0+bar", when="@2.0+variant") provides("mpi@2:", when="@1.9:") ``` We handled the `@2.0+variant` and `@1.9:` parts by generating generated `dependency_condition()`, `required_dependency_condition()`, and `imposed_dependency_condition()` facts to trigger rules like this: ```prolog dependency_conditions_hold(ID, Parent, Dependency) :- attr(Name, Arg1) : required_dependency_condition(ID, Name, Arg1); attr(Name, Arg1, Arg2) : required_dependency_condition(ID, Name, Arg1, Arg2); attr(Name, Arg1, Arg2, Arg3) : required_dependency_condition(ID, Name, Arg1, Arg2, Arg3); dependency_condition(ID, Parent, Dependency); node(Parent). ``` And we handled `foo@1.0+bar` and `mpi@2:` parts ("imposed constraints") like this: ```prolog attr(Name, Arg1, Arg2) :- dependency_conditions_hold(ID, Package, Dependency), imposed_dependency_condition(ID, Name, Arg1, Arg2). attr(Name, Arg1, Arg2, Arg3) :- dependency_conditions_hold(ID, Package, Dependency), imposed_dependency_condition(ID, Name, Arg1, Arg2, Arg3). ``` These rules were repeated with different input predicates for requirements (e.g., `required_dependency_condition`) and imposed constraints (e.g., `imposed_dependency_condition`) throughout `concretize.lp`. 
In #20638 it got to be a bit confusing, because we used the same `dependency_condition_holds` predicate to impose constraints on conditional dependencies and virtual providers. So, even though the pattern was repeated, some of the conditional rules were conjoined in a weird way. Instead of repeating this pattern everywhere, we now have *one* set of consolidated rules for conditions: ```prolog condition_holds(ID) :- condition(ID); attr(Name, A1) : condition_requirement(ID, Name, A1); attr(Name, A1, A2) : condition_requirement(ID, Name, A1, A2); attr(Name, A1, A2, A3) : condition_requirement(ID, Name, A1, A2, A3). attr(Name, A1) :- condition_holds(ID), imposed_constraint(ID, Name, A1). attr(Name, A1, A2) :- condition_holds(ID), imposed_constraint(ID, Name, A1, A2). attr(Name, A1, A2, A3) :- condition_holds(ID), imposed_constraint(ID, Name, A1, A2, A3). ``` this allows us to use `condition(ID)` and `condition_holds(ID)` to encapsulate the conditional logic on specs in all the scenarios where we need it. Instead of defining predicates for the requirements and imposed constraints, we generate the condition inputs with generic facts, and define predicates to associate the condition ID with a particular scenario. So, now, the generated facts for a condition look like this: ```prolog condition(121). condition_requirement(121,"node","cairo"). condition_requirement(121,"variant_value","cairo","fc","True"). imposed_constraint(121,"version_satisfies","fontconfig","2.10.91:"). dependency_condition(121,"cairo","fontconfig"). dependency_type(121,"build"). dependency_type(121,"link"). ``` The requirements and imposed constraints are generic, and we associate them with their meaning via the id. Here, `dependency_condition(121, "cairo", "fontconfig")` tells us that condition 121 has to do with the dependency of `cairo` on `fontconfig`, and the conditional dependency rules just become: ```prolog dependency_holds(Package, Dependency, Type) :- dependency_condition(ID, Package, Dependency), dependency_type(ID, Type), condition_holds(ID). ``` Dependencies, virtuals, conflicts, and externals all now use similar patterns, and the logic for generating condition facts is common to all of them on the python side, as well. The more specific routines like `package_dependencies_rules` just call `self.condition(...)` to get an id and generate requirements and imposed constraints, then they generate their extra facts with the returned id, like this: ```python def package_dependencies_rules(self, pkg, tests): """Translate 'depends_on' directives into ASP logic.""" for _, conditions in sorted(pkg.dependencies.items()): for cond, dep in sorted(conditions.items()): condition_id = self.condition(cond, dep.spec, pkg.name) # create a condition and get its id self.gen.fact(fn.dependency_condition( # associate specifics about the dependency w/the id condition_id, pkg.name, dep.spec.name )) # etc. 
``` - [x] unify generation and logic for conditions - [x] use unified logic for dependencies - [x] use unified logic for virtuals - [x] use unified logic for conflicts - [x] use unified logic for externals LocalWords: concretizer mpi attr Arg concretize lp cairo fc fontconfig LocalWords: virtuals def pkg cond dep fn refactor github py --- lib/spack/spack/solver/asp.py | 130 ++++++++++----------------- lib/spack/spack/solver/concretize.lp | 170 +++++++++++++++-------------------- lib/spack/spack/solver/display.lp | 2 +- 3 files changed, 119 insertions(+), 183 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 8d2b2b8aa5..61d18fc65e 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -483,28 +483,12 @@ class SpackSolverSetup(object): def conflict_rules(self, pkg): for trigger, constraints in pkg.conflicts.items(): + trigger_id = self.condition(spack.spec.Spec(trigger), name=pkg.name) + self.gen.fact(fn.conflict_trigger(trigger_id)) + for constraint, _ in constraints: - constraint_body = spack.spec.Spec(pkg.name) - constraint_body.constrain(constraint) - constraint_body.constrain(trigger) - - clauses = [] - for s in constraint_body.traverse(): - clauses += self.spec_clauses(s, body=True) - - # TODO: find a better way to generate clauses for integrity - # TODO: constraints, instead of generating them for the body - # TODO: of a rule and filter unwanted functions. - to_be_filtered = ['node_compiler_hard'] - clauses = [x for x in clauses if x.name not in to_be_filtered] - - # Emit facts based on clauses - condition_id = next(self._condition_id_counter) - self.gen.fact(fn.conflict(condition_id, pkg.name)) - for clause in clauses: - self.gen.fact(fn.conflict_condition( - condition_id, clause.name, *clause.args - )) + constraint_id = self.condition(constraint, name=pkg.name) + self.gen.fact(fn.conflict(pkg.name, trigger_id, constraint_id)) self.gen.newline() def available_compilers(self): @@ -642,50 +626,44 @@ class SpackSolverSetup(object): ) ) - def _condition_facts( - self, pkg_name, cond_spec, dep_spec, - cond_fn, require_fn, impose_fn - ): + def condition(self, required_spec, imposed_spec=None, name=None): """Generate facts for a dependency or virtual provider condition. Arguments: - pkg_name (str): name of the package that triggers the - condition (e.g., the dependent or the provider) - cond_spec (Spec): the dependency spec representing the - condition that needs to be True (can be anonymous) - dep_spec (Spec): the sepc of the dependency or provider - to be depended on/provided if the condition holds. 
- cond_fn (AspFunction): function to use to declare the condition; - will be called with the cond id, pkg_name, an dep_spec.name - require_fn (AspFunction): function to use to declare the conditions - required of the dependent/provider to trigger - impose_fn (AspFunction): function to use for constraints imposed - on the dependency/virtual + required_spec (Spec): the spec that triggers this condition + imposed_spec (optional, Spec): the sepc with constraints that + are imposed when this condition is triggered + name (optional, str): name for `required_spec` (required if + required_spec is anonymous, ignored if not) Returns: (int): id of the condition created by this function """ - condition_id = next(self._condition_id_counter) - named_cond = cond_spec.copy() - named_cond.name = named_cond.name or pkg_name + named_cond = required_spec.copy() + named_cond.name = named_cond.name or name + assert named_cond.name, "must provide name for anonymous condtions!" - self.gen.fact(cond_fn(condition_id, pkg_name, dep_spec.name)) + condition_id = next(self._condition_id_counter) + self.gen.fact(fn.condition(condition_id)) - # conditions that trigger the condition - conditions = self.checked_spec_clauses( - named_cond, body=True, required_from=pkg_name - ) - for pred in conditions: - self.gen.fact(require_fn(condition_id, pred.name, *pred.args)) + # requirements trigger the condition + requirements = self.checked_spec_clauses( + named_cond, body=True, required_from=name) + for pred in requirements: + self.gen.fact( + fn.condition_requirement(condition_id, pred.name, *pred.args) + ) - imposed_constraints = self.checked_spec_clauses( - dep_spec, required_from=pkg_name - ) - for pred in imposed_constraints: - # imposed "node"-like conditions are no-ops - if pred.name in ("node", "virtual_node"): - continue - self.gen.fact(impose_fn(condition_id, pred.name, *pred.args)) + if imposed_spec: + imposed_constraints = self.checked_spec_clauses( + imposed_spec, body=False, required_from=name) + for pred in imposed_constraints: + # imposed "node"-like conditions are no-ops + if pred.name in ("node", "virtual_node"): + continue + self.gen.fact( + fn.imposed_constraint(condition_id, pred.name, *pred.args) + ) return condition_id @@ -695,25 +673,20 @@ class SpackSolverSetup(object): for provided, whens in pkg.provided.items(): for when in whens: - self._condition_facts( - pkg.name, when, provided, - fn.provider_condition, - fn.required_provider_condition, - fn.imposed_dependency_condition - ) - + condition_id = self.condition(when, provided, pkg.name) + self.gen.fact(fn.provider_condition( + condition_id, when.name, provided.name + )) self.gen.newline() def package_dependencies_rules(self, pkg, tests): """Translate 'depends_on' directives into ASP logic.""" for _, conditions in sorted(pkg.dependencies.items()): for cond, dep in sorted(conditions.items()): - condition_id = self._condition_facts( - pkg.name, cond, dep.spec, - fn.dependency_condition, - fn.required_dependency_condition, - fn.imposed_dependency_condition - ) + condition_id = self.condition(cond, dep.spec, pkg.name) + self.gen.fact(fn.dependency_condition( + condition_id, pkg.name, dep.spec.name + )) for t in sorted(dep.type): # Skip test dependencies if they're not requested at all @@ -794,24 +767,13 @@ class SpackSolverSetup(object): pkg_name, str(version), weight, id )) + # Declare external conditions with a local index into packages.yaml for local_idx, spec in enumerate(external_specs): - condition_id = next(self._condition_id_counter) - - # Declare 
the global ID associated with this external spec - self.gen.fact(fn.external_spec(condition_id, pkg_name)) - - # Local index into packages.yaml + condition_id = self.condition(spec) self.gen.fact( - fn.external_spec_index(condition_id, pkg_name, local_idx)) - - # Add conditions to be satisfied for this external + fn.possible_external(condition_id, pkg_name, local_idx) + ) self.possible_versions[spec.name].add(spec.version) - clauses = self.spec_clauses(spec, body=True) - for clause in clauses: - self.gen.fact( - fn.external_spec_condition( - condition_id, clause.name, *clause.args) - ) self.gen.newline() def preferred_variants(self, pkg_name): @@ -1474,7 +1436,7 @@ class SpecBuilder(object): def no_flags(self, pkg, flag_type): self._specs[pkg].compiler_flags[flag_type] = [] - def external_spec_selected(self, condition_id, pkg, idx): + def external_spec_selected(self, pkg, idx): """This means that the external spec and index idx has been selected for this package. """ diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index d0174ca2e0..01e70b9469 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -31,16 +31,54 @@ version_satisfies(Package, Constraint) #defined preferred_version_declared/3. #defined version_satisfies/3. +%----------------------------------------------------------------------------- +% Spec conditions and imposed constraints +% +% Given Spack directives like these: +% depends_on("foo@1.0+bar", when="@2.0+variant") +% provides("mpi@2:", when="@1.9:") +% +% The conditions are `@2.0+variant` and `@1.9:`, and the imposed constraints +% are `@1.0+bar` on `foo` and `@2:` on `mpi`. +%----------------------------------------------------------------------------- +% conditions are specified with `condition_requirement` and hold when +% corresponding spec attributes hold. +condition_holds(ID) :- + condition(ID); + attr(Name, A1) : condition_requirement(ID, Name, A1); + attr(Name, A1, A2) : condition_requirement(ID, Name, A1, A2); + attr(Name, A1, A2, A3) : condition_requirement(ID, Name, A1, A2, A3). + +% conditions that hold impose constraints on other specs +attr(Name, A1) :- condition_holds(ID), imposed_constraint(ID, Name, A1). +attr(Name, A1, A2) :- condition_holds(ID), imposed_constraint(ID, Name, A1, A2). +attr(Name, A1, A2, A3) :- condition_holds(ID), imposed_constraint(ID, Name, A1, A2, A3). + +#defined condition/1. +#defined condition_requirement/3. +#defined condition_requirement/4. +#defined condition_requirement/5. +#defined imposed_constraint/3. +#defined imposed_constraint/4. +#defined imposed_constraint/5. + %----------------------------------------------------------------------------- % Dependency semantics %----------------------------------------------------------------------------- % Dependencies of any type imply that one package "depends on" another depends_on(Package, Dependency) :- depends_on(Package, Dependency, _). +% a dependency holds if its condition holds +dependency_holds(Package, Dependency, Type) :- + dependency_condition(ID, Package, Dependency), + dependency_type(ID, Type), + condition_holds(ID). + % declared dependencies are real if they're not virtual AND -% the package is not an external +% the package is not an external. +% They're only triggered if the associated dependnecy condition holds. 
depends_on(Package, Dependency, Type) - :- dependency_conditions(Package, Dependency, Type), + :- dependency_holds(Package, Dependency, Type), not virtual(Dependency), not external(Package). @@ -64,74 +102,19 @@ path(Parent, Child) :- depends_on(Parent, Child). path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant). :- path(A, B), path(B, A). -%----------------------------------------------------------------------------- -% Conditional dependencies -% -% This takes care of `when=SPEC` in `depends_on("foo@1.0+bar", when="SPEC")`. -%----------------------------------------------------------------------------- -% if any individual condition below is true, trigger the dependency. -dependency_conditions(Package, Dependency, Type) :- - dependency_conditions_hold(ID, Package, Dependency), - dependency_type(ID, Type). - #defined dependency_type/2. - -% collect all the dependency conditions into a single conditional rule -% distinguishing between Parent and Package (Arg1) is needed to account -% for conditions like: -% -% depends_on('patchelf@0.9', when='@1.0:1.1 ^python@:2') -% -% that include dependencies -dependency_conditions_hold(ID, Parent, Dependency) :- - attr(Name, Arg1) : required_dependency_condition(ID, Name, Arg1); - attr(Name, Arg1, Arg2) : required_dependency_condition(ID, Name, Arg1, Arg2); - attr(Name, Arg1, Arg2, Arg3) : required_dependency_condition(ID, Name, Arg1, Arg2, Arg3); - dependency_condition(ID, Parent, Dependency); - % There must be at least a dependency type declared, - % otherwise the dependency doesn't hold - dependency_type(ID, _); - node(Parent); - not external(Parent). - #defined dependency_condition/3. -#defined required_dependency_condition/3. -#defined required_dependency_condition/4. -#defined required_dependency_condition/5. - -%----------------------------------------------------------------------------- -% Imposed constraints on dependencies -% -% This handles the `@1.0+bar` in `depends_on("foo@1.0+bar", when="SPEC")`, or -% the `mpi@2:` in `provides("mpi@2:", when="@1.9:")`. -%----------------------------------------------------------------------------- -% NOTE: `attr(Name, Arg1)` is omitted here b/c the only single-arg attribute is -% NOTE: `node()`, which is handled above under "Dependency Semantics" - -attr(Name, Arg1, Arg2) :- - dependency_conditions_hold(ID, Package, Dependency), - imposed_dependency_condition(ID, Name, Arg1, Arg2). - -attr(Name, Arg1, Arg2, Arg3) :- - dependency_conditions_hold(ID, Package, Dependency), - imposed_dependency_condition(ID, Name, Arg1, Arg2, Arg3). - -#defined imposed_dependency_condition/4. -#defined imposed_dependency_condition/5. %----------------------------------------------------------------------------- % Conflicts %----------------------------------------------------------------------------- -:- not external(Package) : conflict_condition(ID, "node", Package); - attr(Name, Arg1) : conflict_condition(ID, Name, Arg1); - attr(Name, Arg1, Arg2) : conflict_condition(ID, Name, Arg1, Arg2); - attr(Name, Arg1, Arg2, Arg3) : conflict_condition(ID, Name, Arg1, Arg2, Arg3); - conflict(ID, Package). +:- node(Package), + not external(Package), + conflict(Package, TriggerID, ConstraintID), + condition_holds(TriggerID), + condition_holds(ConstraintID). -#defined conflict/2. -#defined conflict_condition/3. -#defined conflict_condition/4. -#defined conflict_condition/5. +#defined conflict/3. 
%----------------------------------------------------------------------------- % Virtual dependencies @@ -140,10 +123,15 @@ attr(Name, Arg1, Arg2, Arg3) :- % if a package depends on a virtual, it's not external and we have a % provider for that virtual then it depends on the provider depends_on(Package, Provider, Type) - :- dependency_conditions(Package, Virtual, Type), + :- dependency_holds(Package, Virtual, Type), provides_virtual(Provider, Virtual), not external(Package). +% dependencies on virtuals also imply that the virtual is a virtual node +virtual_node(Virtual) + :- dependency_holds(Package, Virtual, Type), + virtual(Virtual), not external(Package). + % if there's a virtual node, we must select one provider 1 { provides_virtual(Package, Virtual) : possible_provider(Package, Virtual) } 1 :- virtual_node(Virtual). @@ -153,32 +141,21 @@ virtual_node(Virtual) :- virtual_root(Virtual). 1 { root(Package) : provides_virtual(Package, Virtual) } 1 :- virtual_root(Virtual). -% all virtual providers come from provider conditions like this -dependency_conditions_hold(ID, Provider, Virtual) :- - attr(Name, Arg1) : required_provider_condition(ID, Name, Arg1); - attr(Name, Arg1, Arg2) : required_provider_condition(ID, Name, Arg1, Arg2); - attr(Name, Arg1, Arg2, Arg3) : required_provider_condition(ID, Name, Arg1, Arg2, Arg3); - virtual(Virtual); - provider_condition(ID, Provider, Virtual). - % The provider provides the virtual if some provider condition holds. provides_virtual(Provider, Virtual) :- provider_condition(ID, Provider, Virtual), - dependency_conditions_hold(ID, Provider, Virtual), + condition_holds(ID), virtual(Virtual). % a node that provides a virtual is a provider provider(Package, Virtual) :- node(Package), provides_virtual(Package, Virtual). -% dependencies on virtuals also imply that the virtual is a virtual node -virtual_node(Virtual) - :- dependency_conditions(Package, Virtual, Type), - virtual(Virtual), not external(Package). - % for any virtual, there can be at most one provider in the DAG 0 { node(Package) : provides_virtual(Package, Virtual) } 1 :- virtual(Virtual). +#defined possible_provider/2. + %----------------------------------------------------------------------------- % Virtual dependency weights %----------------------------------------------------------------------------- @@ -302,32 +279,29 @@ external(Package) :- external_only(Package), node(Package). % a package is a real_node if it is not external real_node(Package) :- node(Package), not external(Package). -% if an external version is selected, the package is external and -% we are using the corresponding spec -external(Package) :- - version(Package, Version), version_weight(Package, Weight), - external_version_declared(Package, Version, Weight, ID). +% a package is external if we are using an external spec for it +external(Package) :- external_spec_selected(Package, _). + +% we can't use the weight for an external version if we don't use the +% corresponding external spec. +:- version(Package, Version), + version_weight(Package, Weight), + external_version_declared(Package, Version, Weight, ID), + not external(Package). % determine if an external spec has been selected -external_spec_selected(ID, Package, LocalIndex) :- - version(Package, Version), version_weight(Package, Weight), - external_spec_index(ID, Package, LocalIndex), - external_version_declared(Package, Version, Weight, LocalIndex), - external_spec_conditions_hold(ID, Package). - -% determine if all the conditions on an external spec hold. 
If they do -% the spec can be selected. -external_spec_conditions_hold(ID, Package) :- - attr(Name, Arg1) : external_spec_condition(ID, Name, Arg1); - attr(Name, Arg1, Arg2) : external_spec_condition(ID, Name, Arg1, Arg2); - attr(Name, Arg1, Arg2, Arg3) : external_spec_condition(ID, Name, Arg1, Arg2, Arg3); - external_spec(ID, Package); - node(Package). +external_spec_selected(Package, LocalIndex) :- + external_conditions_hold(Package, LocalIndex), + node(Package). + +external_conditions_hold(Package, LocalIndex) :- + possible_external(ID, Package, LocalIndex), condition_holds(ID). % it cannot happen that a spec is external, but none of the external specs % conditions hold. -:- external(Package), not external_spec_conditions_hold(_, Package). +:- external(Package), not external_conditions_hold(Package, _). +#defined possible_external/3. #defined external_spec_index/3. #defined external_spec_condition/3. #defined external_spec_condition/4. diff --git a/lib/spack/spack/solver/display.lp b/lib/spack/spack/solver/display.lp index 4b862a26e2..fdc4e4088c 100644 --- a/lib/spack/spack/solver/display.lp +++ b/lib/spack/spack/solver/display.lp @@ -24,4 +24,4 @@ #show compiler_weight/2. #show node_target_match/2. #show node_target_weight/2. -#show external_spec_selected/3. +#show external_spec_selected/2. -- cgit v1.2.3-60-g2f50 From 9717e0244f70892ec4cf8573bf0b88ec95840894 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 10 Feb 2021 01:26:54 -0800 Subject: bugfix: do not generate dep conditions when no dependency We only consider test dependencies some of the time. Some packages are *only* test dependencies. Spack's algorithm was previously generating dependency conditions that could hold, *even* if there was no potential dependency type. - [x] change asp.py so that this can't happen -- we now only generate dependency types for possible dependencies. --- lib/spack/spack/solver/asp.py | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 61d18fc65e..11b8b99518 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -683,21 +683,26 @@ class SpackSolverSetup(object): """Translate 'depends_on' directives into ASP logic.""" for _, conditions in sorted(pkg.dependencies.items()): for cond, dep in sorted(conditions.items()): + deptypes = dep.type.copy() + # Skip test dependencies if they're not requested + if not tests: + deptypes.discard("test") + + # ... or if they are requested only for certain packages + if not isinstance(tests, bool) and pkg.name not in tests: + deptypes.discard("test") + + # if there are no dependency types to be considered + # anymore, don't generate the dependency + if not deptypes: + continue + condition_id = self.condition(cond, dep.spec, pkg.name) self.gen.fact(fn.dependency_condition( condition_id, pkg.name, dep.spec.name )) - for t in sorted(dep.type): - # Skip test dependencies if they're not requested at all - if t == 'test' and not tests: - continue - - # ... 
or if they are requested only for certain packages - if t == 'test' and (not isinstance(tests, bool) - and pkg.name not in tests): - continue - + for t in sorted(deptypes): # there is a declared dependency of type t self.gen.fact(fn.dependency_type(condition_id, t)) -- cgit v1.2.3-60-g2f50 From 4d148a430e77a9d7842f3c02797e760dd87c8f63 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 13 Mar 2021 16:03:50 -0800 Subject: bugfix: allow imposed constraints to be overridden in special cases In most cases, we want condition_holds(ID) to imply any imposed constraints associated with the ID. However, the dependency relationship in Spack is special because it's "extra" conditional -- a dependency *condition* may hold, but we have decided that externals will not have dependencies, so we need a way to avoid having imposed constraints appear for nodes that don't exist. This introduces a new rule that says that constraints are imposed *unless* we define `do_not_impose(ID)`. This allows rules like dependencies, which rely on more than just spec conditions, to cancel imposed constraints. We add one special case for this: dependencies of externals. --- lib/spack/spack/solver/concretize.lp | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 01e70b9469..492a7c817a 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -49,10 +49,14 @@ condition_holds(ID) :- attr(Name, A1, A2) : condition_requirement(ID, Name, A1, A2); attr(Name, A1, A2, A3) : condition_requirement(ID, Name, A1, A2, A3). +% condition_holds(ID) implies all imposed_constraints, unless do_not_impose(ID) +% is derived. This allows imposed constraints to be canceled in special cases. +impose(ID) :- condition_holds(ID), not do_not_impose(ID). + % conditions that hold impose constraints on other specs -attr(Name, A1) :- condition_holds(ID), imposed_constraint(ID, Name, A1). -attr(Name, A1, A2) :- condition_holds(ID), imposed_constraint(ID, Name, A1, A2). -attr(Name, A1, A2, A3) :- condition_holds(ID), imposed_constraint(ID, Name, A1, A2, A3). +attr(Name, A1) :- impose(ID), imposed_constraint(ID, Name, A1). +attr(Name, A1, A2) :- impose(ID), imposed_constraint(ID, Name, A1, A2). +attr(Name, A1, A2, A3) :- impose(ID), imposed_constraint(ID, Name, A1, A2, A3). #defined condition/1. #defined condition_requirement/3. @@ -72,15 +76,21 @@ depends_on(Package, Dependency) :- depends_on(Package, Dependency, _). dependency_holds(Package, Dependency, Type) :- dependency_condition(ID, Package, Dependency), dependency_type(ID, Type), - condition_holds(ID). + condition_holds(ID), + not external(Package). + +% We cut off dependencies of externals (as we don't really know them). +% Don't impose constraints on dependencies that don't exist. +do_not_impose(ID) :- + not dependency_holds(Package, Dependency, _), + dependency_condition(ID, Package, Dependency). % declared dependencies are real if they're not virtual AND % the package is not an external. % They're only triggered if the associated dependnecy condition holds. depends_on(Package, Dependency, Type) :- dependency_holds(Package, Dependency, Type), - not virtual(Dependency), - not external(Package). + not virtual(Dependency). % every root must be a node node(Package) :- root(Package). 
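To make the new cancellation mechanism concrete, here is an illustration that reuses the `cairo`/`fontconfig` facts from the "unify logic for spec conditionals" commit message above (the fact values are only an example):

```prolog
% facts generated for the conditional dependency of cairo on fontconfig
condition(121).
dependency_condition(121, "cairo", "fontconfig").
imposed_constraint(121, "version_satisfies", "fontconfig", "2.10.91:").

% If cairo is declared external, dependency_holds("cairo", "fontconfig", _) is
% never derived (its rule requires `not external("cairo")`), so
% do_not_impose(121) is derived, impose(121) is blocked, and the fontconfig
% version constraint above never turns into an attr(), i.e. the external
% gets no dependencies, as intended.
```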
-- cgit v1.2.3-60-g2f50 From 61e619bb27428c43697ad8447fe761b486e798cb Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 17 Mar 2021 15:38:14 +0100 Subject: spack location: bugfix for out of source build dirs (#22348) --- lib/spack/spack/cmd/location.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py index 60978fe404..cced5d29a0 100644 --- a/lib/spack/spack/cmd/location.py +++ b/lib/spack/spack/cmd/location.py @@ -110,4 +110,16 @@ def location(parser, args): tty.die("Build directory does not exist yet. " "Run this to create it:", "spack stage " + " ".join(args.spec)) - print(pkg.stage.source_path) + + # Out of source builds have build_directory defined + if hasattr(pkg, 'build_directory'): + # build_directory can be either absolute or relative + # to the stage path in either case os.path.join makes it + # absolute + print(os.path.normpath(os.path.join( + pkg.stage.path, + pkg.build_directory + ))) + else: + # Otherwise assume in-source builds + return print(pkg.stage.source_path) -- cgit v1.2.3-60-g2f50 From 31a07f9bc40300a134f77fb60a4190e3892840fd Mon Sep 17 00:00:00 2001 From: Rémi Lacroix Date: Tue, 23 Mar 2021 18:04:40 +0100 Subject: Channelflow: Fix the package. (#22483) A search and replace went wrong in 2264e30d99d8b9fbdec8fa69b594e53d8ced15a1. Thanks to @wadudmiah who reported this issue. --- var/spack/repos/builtin/packages/channelflow/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/channelflow/package.py b/var/spack/repos/builtin/packages/channelflow/package.py index 12e70b4332..ecd16053a5 100644 --- a/var/spack/repos/builtin/packages/channelflow/package.py +++ b/var/spack/repos/builtin/packages/channelflow/package.py @@ -69,7 +69,7 @@ class Channelflow(CMakePackage): } args.append('-DWITH_NETCDF:STRING={0}'.format( - netcdf_str[spec.variants['netcdf-c'].value] + netcdf_str[spec.variants['netcdf'].value] )) # Set an MPI compiler for parallel builds -- cgit v1.2.3-60-g2f50 From b11dd0478a70750f099aa46784601cdd71f0873e Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 26 Mar 2021 08:16:11 +0100 Subject: Make SingleFileScope able to repopulate the cache after clearing it (#22559) fixes #22547 SingleFileScope was not able to repopulate its cache before this change. This was affecting the configuration seen by environments using clingo bootstrapped from sources, since the bootstrapping operation involved a few cache invalidation for config files. 
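In short, the fixed behavior is the following (a sketch mirroring the regression test added below; `env_yaml` is assumed to point at a single-file environment configuration like the test fixture):

```python
import spack.config
import spack.schema.env

env_yaml = "env.yaml"  # assumed path to an env.yaml file

scope = spack.config.SingleFileScope(
    'env', env_yaml, spack.schema.env.schema, ['env']
)
before = scope.get_section('config')  # reads the file, fills the section cache
scope.clear()                         # drops the cached sections
after = scope.get_section('config')   # used to return None, now repopulates
assert before is not None and before == after
```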
--- lib/spack/spack/config.py | 8 ++++++ lib/spack/spack/test/config.py | 55 ++++++++++++++++++++++++++++++------------ 2 files changed, 47 insertions(+), 16 deletions(-) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 6067d65fff..e761c20f7f 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -240,11 +240,18 @@ class SingleFileScope(ConfigScope): # } # } # } + + # This bit ensures we have read the file and have + # the raw data in memory if self._raw_data is None: self._raw_data = read_config_file(self.path, self.schema) if self._raw_data is None: return None + # Here we know we have the raw data and ensure we + # populate the sections dictionary, which may be + # cleared by the clear() method + if not self.sections: section_data = self._raw_data for key in self.yaml_path: if section_data is None: @@ -253,6 +260,7 @@ class SingleFileScope(ConfigScope): for section_key, data in section_data.items(): self.sections[section_key] = {section_key: data} + return self.sections.get(section, None) def _write_section(self, section): diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index f1081be737..10561601be 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -72,6 +72,25 @@ def write_config_file(tmpdir): return _write +@pytest.fixture() +def env_yaml(tmpdir): + """Return a sample env.yaml for test purposes""" + env_yaml = str(tmpdir.join("env.yaml")) + with open(env_yaml, 'w') as f: + f.write("""\ +env: + config: + verify_ssl: False + dirty: False + packages: + libelf: + compiler: [ 'gcc@4.5.3' ] + repos: + - /x/y/z +""") + return env_yaml + + def check_compiler_config(comps, *compiler_names): """Check that named compilers in comps match Spack's config.""" config = spack.config.get('compilers') @@ -797,23 +816,10 @@ config: scope._write_section('config') -def test_single_file_scope(tmpdir, config): - env_yaml = str(tmpdir.join("env.yaml")) - with open(env_yaml, 'w') as f: - f.write("""\ -env: - config: - verify_ssl: False - dirty: False - packages: - libelf: - compiler: [ 'gcc@4.5.3' ] - repos: - - /x/y/z -""") - +def test_single_file_scope(config, env_yaml): scope = spack.config.SingleFileScope( - 'env', env_yaml, spack.schema.env.schema, ['env']) + 'env', env_yaml, spack.schema.env.schema, ['env'] + ) with spack.config.override(scope): # from the single-file config @@ -1045,3 +1051,20 @@ def test_bad_path_double_override(config): match='Meaningless second override'): with spack.config.override('bad::double:override::directive', ''): pass + + +@pytest.mark.regression('22547') +def test_single_file_scope_cache_clearing(env_yaml): + scope = spack.config.SingleFileScope( + 'env', env_yaml, spack.schema.env.schema, ['env'] + ) + # Check that we can retrieve data from the single file scope + before = scope.get_section('config') + assert before + # Clear the cache of the Single file scope + scope.clear() + # Check that the section can be retireved again and it's + # the same as before + after = scope.get_section('config') + assert after + assert before == after -- cgit v1.2.3-60-g2f50 From e7494b627b7bfc033a944a3d024280eed9ca6c3c Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 26 Mar 2021 15:22:38 +0100 Subject: ASP-based solver: model disjoint sets for multivalued variants (#22534) * ASP-based solver: avoid adding values to variants when they're set fixes #22533 fixes #21911 Added a rule that prevents any value to slip in a variant when the variant is set explicitly. 
This is relevant for multi-valued variants, in particular for those that have disjoint sets of values. * Ensure disjoint sets have a clear semantics for external packages --- lib/spack/spack/solver/asp.py | 9 ++++-- lib/spack/spack/solver/concretize.lp | 10 +++++++ lib/spack/spack/test/concretize.py | 32 ++++++++++++++++++++++ .../builtin.mock/packages/mvapich2/package.py | 15 ++++++++++ 4 files changed, 64 insertions(+), 2 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/mvapich2/package.py diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 11b8b99518..a663e3e274 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -593,11 +593,16 @@ class SpackSolverSetup(object): values = [] elif isinstance(values, spack.variant.DisjointSetsOfValues): union = set() - for s in values.sets: + # Encode the disjoint sets in the logic program + for sid, s in enumerate(values.sets): + for value in s: + self.gen.fact(fn.variant_value_from_disjoint_sets( + pkg.name, name, value, sid + )) union.update(s) values = union - # make sure that every variant has at least one posisble value + # make sure that every variant has at least one possible value if not values: values = [variant.default] diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 492a7c817a..b875c58527 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -344,6 +344,15 @@ variant_set(Package, Variant) :- variant_set(Package, Variant, _). % A variant cannot have a value that is not also a possible value :- variant_value(Package, Variant, Value), not variant_possible_value(Package, Variant, Value). +% Some multi valued variants accept multiple values from disjoint sets. +% Ensure that we respect that constraint and we don't pick values from more +% than one set at once +:- variant_value(Package, Variant, Value1), + variant_value(Package, Variant, Value2), + variant_value_from_disjoint_sets(Package, Variant, Value1, Set1), + variant_value_from_disjoint_sets(Package, Variant, Value2, Set2), + Set1 != Set2. + % variant_set is an explicitly set variant value. If it's not 'set', % we revert to the default value. If it is set, we force the set value variant_value(Package, Variant, Value) @@ -403,6 +412,7 @@ variant_single_value(Package, "dev_path") #defined variant_possible_value/3. #defined variant_default_value_from_packages_yaml/3. #defined variant_default_value_from_package_py/3. +#defined variant_value_from_disjoint_sets/4. 
%----------------------------------------------------------------------------- % Platform semantics diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 2220ab9eb4..c118304e64 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -1110,3 +1110,35 @@ class TestConcretize(object): s = Spec(spec_str) with pytest.raises(RuntimeError, match='not found in package'): s.concretize() + + @pytest.mark.regression('22533') + @pytest.mark.parametrize('spec_str,variant_name,expected_values', [ + # Test the default value 'auto' + ('mvapich2', 'file_systems', ('auto',)), + # Test setting a single value from the disjoint set + ('mvapich2 file_systems=lustre', 'file_systems', ('lustre',)), + # Test setting multiple values from the disjoint set + ('mvapich2 file_systems=lustre,gpfs', 'file_systems', + ('lustre', 'gpfs')), + ]) + def test_mv_variants_disjoint_sets_from_spec( + self, spec_str, variant_name, expected_values + ): + s = Spec(spec_str).concretized() + assert set(expected_values) == set(s.variants[variant_name].value) + + @pytest.mark.regression('22533') + def test_mv_variants_disjoint_sets_from_packages_yaml(self): + external_mvapich2 = { + 'mvapich2': { + 'buildable': False, + 'externals': [{ + 'spec': 'mvapich2@2.3.1 file_systems=nfs,ufs', + 'prefix': '/usr' + }] + } + } + spack.config.set('packages', external_mvapich2) + + s = Spec('mvapich2').concretized() + assert set(s.variants['file_systems'].value) == set(['ufs', 'nfs']) diff --git a/var/spack/repos/builtin.mock/packages/mvapich2/package.py b/var/spack/repos/builtin.mock/packages/mvapich2/package.py new file mode 100644 index 0000000000..dd62fc4c71 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/mvapich2/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class Mvapich2(Package): + homepage = "http://www.homepage.org" + url = "http://www.someurl" + + version('1.5', '9c5d5d4fe1e17dd12153f40bc5b6dbc0') + + variant( + 'file_systems', + description='List of the ROMIO file systems to activate', + values=auto_or_any_combination_of('lustre', 'gpfs', 'nfs', 'ufs'), + ) -- cgit v1.2.3-60-g2f50 From 18880a668b9cbf74d892f7c3ddb2de70497b3e62 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 26 Mar 2021 18:43:41 +0100 Subject: clingo: modify recipe for bootstrapping (#22354) * clingo: modify recipe for bootstrapping Modifications: - clingo builds with shared Python only if ^python+shared - avoid building the clingo app for bootstrapping - don't link to libpython when bootstrapping * Remove option that breaks on linux * Give more hints for the current Python * Disable CLINGO_BUILD_PY_SHARED for bootstrapping * bootstrapping: try to detect the current python from std library This is much faster than calling external executables * Fix compatibility with Python 2.6 * Give hints on which compiler and OS to use when bootstrapping This change hints which compiler to use for bootstrapping clingo (either GCC or Apple Clang on MacOS). On Cray platforms it also hints to build for the frontend system, where software is meant to be installed. 
* Use spec_for_current_python to constrain module requirement (cherry picked from commit d5fa509b072f0e58f00eaf81c60f32958a9f1e1d) --- lib/spack/spack/bootstrap.py | 60 +++++++++++++++++++--- lib/spack/spack/solver/asp.py | 9 ++-- .../builtin/packages/clingo-bootstrap/package.py | 15 ++++++ var/spack/repos/builtin/packages/clingo/package.py | 15 ++++-- 4 files changed, 83 insertions(+), 16 deletions(-) diff --git a/lib/spack/spack/bootstrap.py b/lib/spack/spack/bootstrap.py index 52bd810e89..3d51ca931e 100644 --- a/lib/spack/spack/bootstrap.py +++ b/lib/spack/spack/bootstrap.py @@ -5,6 +5,13 @@ import contextlib import os import sys +try: + import sysconfig # novm +except ImportError: + # Not supported on Python 2.6 + pass + +import archspec.cpu import llnl.util.filesystem as fs import llnl.util.tty as tty @@ -20,17 +27,33 @@ import spack.util.executable from spack.util.environment import EnvironmentModifications +def spec_for_current_python(): + """For bootstrapping purposes we are just interested in the Python + minor version (all patches are ABI compatible with the same minor) + and on whether ucs4 support has been enabled for Python 2.7 + + See: + https://www.python.org/dev/peps/pep-0513/ + https://stackoverflow.com/a/35801395/771663 + """ + version_str = '.'.join(str(x) for x in sys.version_info[:2]) + variant_str = '' + if sys.version_info[0] == 2 and sys.version_info[1] == 7: + unicode_size = sysconfig.get_config_var('Py_UNICODE_SIZE') + variant_str = '+ucs4' if unicode_size == 4 else '~ucs4' + + spec_fmt = 'python@{0} {1}' + return spec_fmt.format(version_str, variant_str) + + @contextlib.contextmanager def spack_python_interpreter(): """Override the current configuration to set the interpreter under which Spack is currently running as the only Python external spec available. """ - python_cls = type(spack.spec.Spec('python').package) python_prefix = os.path.dirname(os.path.dirname(sys.executable)) - externals = python_cls.determine_spec_details( - python_prefix, [os.path.basename(sys.executable)]) - external_python = externals[0] + external_python = spec_for_current_python() entry = { 'buildable': False, @@ -60,9 +83,10 @@ def make_module_available(module, spec=None, install=False): # We can constrain by a shortened version in place of a version range # because this spec is only used for querying or as a placeholder to be # replaced by an external that already has a concrete version. This syntax - # is not suffucient when concretizing without an external, as it will + # is not sufficient when concretizing without an external, as it will # concretize to python@X.Y instead of python@X.Y.Z - spec.constrain('^python@%d.%d' % sys.version_info[:2]) + python_requirement = '^' + spec_for_current_python() + spec.constrain(python_requirement) installed_specs = spack.store.db.query(spec, installed=True) for ispec in installed_specs: @@ -197,3 +221,27 @@ def ensure_bootstrap_configuration(): with spack.store.use_store(spack.paths.user_bootstrap_store): with spack_python_interpreter(): yield + + +def clingo_root_spec(): + # Construct the root spec that will be used to bootstrap clingo + spec_str = 'clingo-bootstrap@spack+python' + + # Add a proper compiler hint to the root spec. We use GCC for + # everything but MacOS. 
+ if str(spack.architecture.platform()) == 'darwin': + spec_str += ' %apple-clang' + else: + spec_str += ' %gcc' + + # Add hint to use frontend operating system on Cray + if str(spack.architecture.platform()) == 'cray': + spec_str += ' os=fe' + + # Add the generic target + generic_target = archspec.cpu.host().family + spec_str += ' target={0}'.format(str(generic_target)) + + tty.debug('[BOOTSTRAP ROOT SPEC] clingo: {0}'.format(spec_str)) + + return spack.spec.Spec(spec_str) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index a663e3e274..15994b01ea 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -253,14 +253,11 @@ class PyclingoDriver(object): # TODO: Find a way to vendor the concrete spec # in a cross-platform way with spack.bootstrap.ensure_bootstrap_configuration(): - generic_target = archspec.cpu.host().family - spec_str = 'clingo-bootstrap@spack+python target={0}'.format( - str(generic_target) - ) - clingo_spec = spack.spec.Spec(spec_str) + clingo_spec = spack.bootstrap.clingo_root_spec() clingo_spec._old_concretize() spack.bootstrap.make_module_available( - 'clingo', spec=clingo_spec, install=True) + 'clingo', spec=clingo_spec, install=True + ) import clingo self.out = asp or llnl.util.lang.Devnull() self.cores = cores diff --git a/var/spack/repos/builtin/packages/clingo-bootstrap/package.py b/var/spack/repos/builtin/packages/clingo-bootstrap/package.py index 014ba12927..aeef40c4e1 100644 --- a/var/spack/repos/builtin/packages/clingo-bootstrap/package.py +++ b/var/spack/repos/builtin/packages/clingo-bootstrap/package.py @@ -9,6 +9,9 @@ import spack.compilers class ClingoBootstrap(Clingo): """Clingo with some options used for bootstrapping""" + + maintainers = ['alalazo'] + variant('build_type', default='Release', values=('Release',), description='CMake build type') @@ -41,6 +44,18 @@ class ClingoBootstrap(Clingo): # Clingo needs the Python module to be usable by Spack conflicts('~python', msg='Python support is required to bootstrap Spack') + @property + def cmake_py_shared(self): + return self.define('CLINGO_BUILD_PY_SHARED', 'OFF') + + def cmake_args(self): + args = super(ClingoBootstrap, self).cmake_args() + args.extend([ + # Avoid building the clingo executable + self.define('CLINGO_BUILD_APPS', 'OFF'), + ]) + return args + def setup_build_environment(self, env): if '%apple-clang platform=darwin' in self.spec: opts = '-mmacosx-version-min=10.13' diff --git a/var/spack/repos/builtin/packages/clingo/package.py b/var/spack/repos/builtin/packages/clingo/package.py index 7f43f4708b..84f3c37c27 100644 --- a/var/spack/repos/builtin/packages/clingo/package.py +++ b/var/spack/repos/builtin/packages/clingo/package.py @@ -21,7 +21,7 @@ class Clingo(CMakePackage): url = "https://github.com/potassco/clingo/archive/v5.2.2.tar.gz" git = 'https://github.com/potassco/clingo.git' - maintainers = ["tgamblin"] + maintainers = ["tgamblin", "alalazo"] version('master', branch='master', submodules=True, preferred=True) version('spack', commit='2a025667090d71b2c9dce60fe924feb6bde8f667', submodules=True) @@ -56,10 +56,17 @@ class Clingo(CMakePackage): """Return standard CMake defines to ensure that the current spec is the one found by CMake find_package(Python, ...) 
""" + python_spec = self.spec['python'] + include_dir = python_spec.package.get_python_inc() return [ - '-DPython_EXECUTABLE={0}'.format(str(self.spec['python'].command)) + self.define('Python_EXECUTABLE', str(python_spec.command)), + self.define('Python_INCLUDE_DIR', include_dir) ] + @property + def cmake_py_shared(self): + return self.define('CLINGO_BUILD_PY_SHARED', 'ON') + def cmake_args(self): try: self.compiler.cxx14_flag @@ -69,10 +76,10 @@ class Clingo(CMakePackage): args = [ '-DCLINGO_REQUIRE_PYTHON=ON', '-DCLINGO_BUILD_WITH_PYTHON=ON', - '-DCLINGO_BUILD_PY_SHARED=ON', '-DPYCLINGO_USER_INSTALL=OFF', '-DPYCLINGO_USE_INSTALL_PREFIX=ON', - '-DCLINGO_BUILD_WITH_LUA=OFF' + '-DCLINGO_BUILD_WITH_LUA=OFF', + self.cmake_py_shared ] if self.spec['cmake'].satisfies('@3.16.0:'): args += self.cmake_python_hints -- cgit v1.2.3-60-g2f50 From 9f99e8ad6484165dd957857ec26c68654ba0b8f8 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Sat, 27 Mar 2021 22:22:11 +0100 Subject: Externals are preferred even when they have non-default variant values fixes #22596 Variants which are specified in an external spec are not scored negatively if they encode a non-default value. --- lib/spack/spack/solver/concretize.lp | 19 ++++++++++++++++++- lib/spack/spack/test/concretize.py | 10 ++++++++++ lib/spack/spack/test/data/config/packages.yaml | 12 +++++++++++- .../packages/external-non-default-variant/package.py | 13 +++++++++++++ .../trigger-external-non-default-variant/package.py | 12 ++++++++++++ 5 files changed, 64 insertions(+), 2 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/external-non-default-variant/package.py create mode 100644 var/spack/repos/builtin.mock/packages/trigger-external-non-default-variant/package.py diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index b875c58527..9e66760695 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -360,23 +360,40 @@ variant_value(Package, Variant, Value) variant(Package, Variant), variant_set(Package, Variant, Value). -% prefer default values. +% The rules below allow us to prefer default values for variants +% whenever possible. If a variant is set in a spec, or if it is +% specified in an external, we score it as if it was a default value. variant_not_default(Package, Variant, Value, 1) :- variant_value(Package, Variant, Value), not variant_default_value(Package, Variant, Value), not variant_set(Package, Variant, Value), + not external_with_variant_set(Package, Variant, Value), node(Package). +% We are using the default value for a variant variant_not_default(Package, Variant, Value, 0) :- variant_value(Package, Variant, Value), variant_default_value(Package, Variant, Value), node(Package). +% The variant is set in the spec variant_not_default(Package, Variant, Value, 0) :- variant_value(Package, Variant, Value), variant_set(Package, Variant, Value), node(Package). +% The variant is set in an external spec +external_with_variant_set(Package, Variant, Value) + :- variant_value(Package, Variant, Value), + condition_requirement(ID, "variant_value", Package, Variant, Value), + possible_external(ID, Package, _), + external(Package), + node(Package). + +variant_not_default(Package, Variant, Value, 0) + :- variant_value(Package, Variant, Value), + external_with_variant_set(Package, Variant, Value), + node(Package). 
% The default value for a variant in a package is what is written % in the package.py file, unless some preference is set in packages.yaml diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index c118304e64..f5294ae596 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -1142,3 +1142,13 @@ class TestConcretize(object): s = Spec('mvapich2').concretized() assert set(s.variants['file_systems'].value) == set(['ufs', 'nfs']) + + @pytest.mark.regression('22596') + def test_external_with_non_default_variant_as_dependency(self): + # This package depends on another that is registered as an external + # with 'buildable: true' and a variant with a non-default value set + s = Spec('trigger-external-non-default-variant').concretized() + + assert '~foo' in s['external-non-default-variant'] + assert '~bar' in s['external-non-default-variant'] + assert s['external-non-default-variant'].external diff --git a/lib/spack/spack/test/data/config/packages.yaml b/lib/spack/spack/test/data/config/packages.yaml index 83f8cf1bb3..496c3623cc 100644 --- a/lib/spack/spack/test/data/config/packages.yaml +++ b/lib/spack/spack/test/data/config/packages.yaml @@ -35,4 +35,14 @@ packages: - spec: external-buildable-with-variant@1.1.special +baz prefix: /usr - spec: external-buildable-with-variant@0.9 +baz - prefix: /usr \ No newline at end of file + prefix: /usr + 'old-external': + buildable: True + externals: + - spec: old-external@1.0.0 + prefix: /usr + 'external-non-default-variant': + buildable: True + externals: + - spec: external-non-default-variant@3.8.7~foo~bar + prefix: /usr diff --git a/var/spack/repos/builtin.mock/packages/external-non-default-variant/package.py b/var/spack/repos/builtin.mock/packages/external-non-default-variant/package.py new file mode 100644 index 0000000000..f5a4e0f07d --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/external-non-default-variant/package.py @@ -0,0 +1,13 @@ +# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class ExternalNonDefaultVariant(Package): + """An external that is registered with a non-default value""" + homepage = "http://www.python.org" + url = "http://www.python.org/ftp/python/3.8.7/Python-3.8.7.tgz" + + version('3.8.7', 'be78e48cdfc1a7ad90efff146dce6cfe') + + variant('foo', default=True, description='just a variant') + variant('bar', default=True, description='just a variant') diff --git a/var/spack/repos/builtin.mock/packages/trigger-external-non-default-variant/package.py b/var/spack/repos/builtin.mock/packages/trigger-external-non-default-variant/package.py new file mode 100644 index 0000000000..8b2b2be70d --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/trigger-external-non-default-variant/package.py @@ -0,0 +1,12 @@ +# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class TriggerExternalNonDefaultVariant(Package): + """This ackage depends on an external with a non-default variant""" + homepage = "http://www.example.com" + url = "http://www.someurl.tar.gz" + + version('1.0', 'foobarbaz') + + depends_on('external-non-default-variant') -- cgit v1.2.3-60-g2f50 From 6e714808fa1c461d9c7e4be5502ffcb9cb2e9a66 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 26 Mar 2021 11:17:04 +0100 Subject: Enforce uniqueness of the version_weight atom per node fixes #22565 This change enforces the uniqueness of the version_weight atom per node(Package) in the DAG. It does so by applying FTSE and adding an extra layer of indirection with the possible_version_weight/2 atom. Before this change it may have happened that for the same node two different version_weight/2 were in the answer set, each of which referred to a different spec with the same version, and their weights would sum up. This lead to unexpected result like preferring to build a new version of an external if the external version was older. --- lib/spack/spack/solver/asp.py | 23 ++++++++++++++++++---- lib/spack/spack/solver/concretize.lp | 6 ++++-- lib/spack/spack/test/concretize.py | 3 +++ .../builtin.mock/packages/old-external/package.py | 17 ++++++++++++++++ 4 files changed, 43 insertions(+), 6 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/old-external/package.py diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 15994b01ea..9ccbbaf9b1 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -393,6 +393,8 @@ class SpackSolverSetup(object): def __init__(self): self.gen = None # set by setup() self.possible_versions = {} + self.versions_in_package_py = {} + self.versions_from_externals = {} self.possible_virtuals = None self.possible_compilers = [] self.variant_values_from_specs = set() @@ -446,9 +448,18 @@ class SpackSolverSetup(object): # c) Numeric or string comparison v) - most_to_least_preferred = sorted( - self.possible_versions[pkg.name], key=keyfn, reverse=True - ) + # Compute which versions appear only in packages.yaml + from_externals = self.versions_from_externals[pkg.name] + from_package_py = self.versions_in_package_py[pkg.name] + only_from_externals = from_externals - from_package_py + + # These versions don't need a default weight, as they are + # already weighted in a more favorable way when accounting + # for externals. 
Assigning them a default weight would be + # equivalent to state that they are also declared in + # the package.py file + considered = self.possible_versions[pkg.name] - only_from_externals + most_to_least_preferred = sorted(considered, key=keyfn, reverse=True) for i, v in enumerate(most_to_least_preferred): self.gen.fact(fn.version_declared(pkg.name, v, i)) @@ -780,6 +791,7 @@ class SpackSolverSetup(object): self.gen.fact( fn.possible_external(condition_id, pkg_name, local_idx) ) + self.versions_from_externals[spec.name].add(spec.version) self.possible_versions[spec.name].add(spec.version) self.gen.newline() @@ -987,11 +999,14 @@ class SpackSolverSetup(object): def build_version_dict(self, possible_pkgs, specs): """Declare any versions in specs not declared in packages.""" - self.possible_versions = collections.defaultdict(lambda: set()) + self.possible_versions = collections.defaultdict(set) + self.versions_in_package_py = collections.defaultdict(set) + self.versions_from_externals = collections.defaultdict(set) for pkg_name in possible_pkgs: pkg = spack.repo.get(pkg_name) for v in pkg.versions: + self.versions_in_package_py[pkg_name].add(v) self.possible_versions[pkg_name].add(v) for spec in specs: diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 9e66760695..ee69e9798f 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -14,13 +14,15 @@ version_declared(Package, Version) :- version_declared(Package, Version, _). 1 { version(Package, Version) : version_declared(Package, Version) } 1 :- node(Package). -version_weight(Package, Weight) +possible_version_weight(Package, Weight) :- version(Package, Version), version_declared(Package, Version, Weight), not preferred_version_declared(Package, Version, _). -version_weight(Package, Weight) +possible_version_weight(Package, Weight) :- version(Package, Version), preferred_version_declared(Package, Version, Weight). +1 { version_weight(Package, Weight) : possible_version_weight(Package, Weight) } 1 :- node(Package). + % version_satisfies implies that exactly one of the satisfying versions % is the package's version, and vice versa. 1 { version(Package, Version) : version_satisfies(Package, Constraint, Version) } 1 diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index f5294ae596..437edcb99f 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -1060,6 +1060,9 @@ class TestConcretize(object): # having an additional dependency, but the dependency shouldn't # appear in the answer set ('external-buildable-with-variant@0.9 +baz', True, '@0.9'), + # This package has an external version declared that would be + # the least preferred if Spack had to build it + ('old-external', True, '@1.0.0'), ]) def test_external_package_versions(self, spec_str, is_external, expected): s = Spec(spec_str).concretized() diff --git a/var/spack/repos/builtin.mock/packages/old-external/package.py b/var/spack/repos/builtin.mock/packages/old-external/package.py new file mode 100644 index 0000000000..cf414195a2 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/old-external/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +class OldExternal(Package): + """A package that has an old version declared in packages.yaml""" + + homepage = "https://www.example.com" + url = "https://www.example.com/old-external.tar.gz" + + version('1.2.0', '0123456789abcdef0123456789abcdef') + version('1.1.4', '0123456789abcdef0123456789abcdef') + version('1.1.3', '0123456789abcdef0123456789abcdef') + version('1.1.2', '0123456789abcdef0123456789abcdef') + version('1.1.1', '0123456789abcdef0123456789abcdef') + version('1.1.0', '0123456789abcdef0123456789abcdef') + version('1.0.0', '0123456789abcdef0123456789abcdef') -- cgit v1.2.3-60-g2f50 From a5213dabb137ac7eddbc538ee17fb2725cf59a78 Mon Sep 17 00:00:00 2001 From: Cyrus Harrison Date: Mon, 29 Mar 2021 17:09:34 -0700 Subject: bugfix for active when pkg is already active error (#22587) * bugfix for active when pkg is already active error Co-authored-by: Greg Becker --- lib/spack/spack/package.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index de394e2d45..8aa86098e2 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -2284,8 +2284,13 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)): extensions_layout = view.extensions_layout - extensions_layout.check_extension_conflict( - self.extendee_spec, self.spec) + try: + extensions_layout.check_extension_conflict( + self.extendee_spec, self.spec) + except spack.directory_layout.ExtensionAlreadyInstalledError as e: + # already installed, let caller know + tty.msg(e.message) + return # Activate any package dependencies that are also extensions. if with_dependencies: -- cgit v1.2.3-60-g2f50 From c8a10e4910f8222e34b22b2e37c26ace4bae968f Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 30 Mar 2021 13:23:39 +0200 Subject: Fix clearing cache of InternalConfigScope (#22609) Co-authored-by: Massimiliano Culpo --- lib/spack/spack/config.py | 4 ++ lib/spack/spack/test/cmd/common/arguments.py | 64 +++++++++++----------------- lib/spack/spack/test/config.py | 21 +++++++++ 3 files changed, 49 insertions(+), 40 deletions(-) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index e761c20f7f..8794072e96 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -361,6 +361,10 @@ class InternalConfigScope(ConfigScope): def __repr__(self): return '' % self.name + def clear(self): + # no cache to clear here. + pass + @staticmethod def _process_dict_keyname_overrides(data): """Turn a trailing `:' in a key name into an override attribute.""" diff --git a/lib/spack/spack/test/cmd/common/arguments.py b/lib/spack/spack/test/cmd/common/arguments.py index 76e39bcad0..6646144a81 100644 --- a/lib/spack/spack/test/cmd/common/arguments.py +++ b/lib/spack/spack/test/cmd/common/arguments.py @@ -15,53 +15,37 @@ import spack.config @pytest.fixture() -def parser(): +def job_parser(): + # --jobs needs to write to a command_line config scope, so this is the only + # scope we create. p = argparse.ArgumentParser() arguments.add_common_arguments(p, ['jobs']) - yield p - # Cleanup the command line scope if it was set during tests - spack.config.config.clear_caches() - if 'command_line' in spack.config.config.scopes: - spack.config.config.scopes['command_line'].clear() - - -@pytest.fixture(params=[1, 2, 4, 8, 16, 32]) -def ncores(monkeypatch, request): - """Mocks having a machine with n cores for the purpose of - computing config:build_jobs. 
- """ - def _cpu_count(): - return request.param - - # Patch multiprocessing.cpu_count() to return the value we need - monkeypatch.setattr(multiprocessing, 'cpu_count', _cpu_count) - # Patch the configuration parts that have been cached already - monkeypatch.setitem(spack.config.config_defaults['config'], - 'build_jobs', min(16, request.param)) - monkeypatch.setitem( - spack.config.config.scopes, '_builtin', - spack.config.InternalConfigScope( - '_builtin', spack.config.config_defaults - )) - return request.param - - -@pytest.mark.parametrize('cli_args,requested', [ - (['-j', '24'], 24), - # Here we report the default if we have enough cores, as the cap - # on the available number of cores will be taken care of in the test - ([], 16) -]) -def test_setting_parallel_jobs(parser, cli_args, ncores, requested): - expected = min(requested, ncores) - namespace = parser.parse_args(cli_args) + scopes = [spack.config.InternalConfigScope('command_line', {'config': {}})] + + with spack.config.use_configuration(*scopes): + yield p + + +@pytest.mark.parametrize("ncores", [1, 2, 4, 8, 16, 32]) +def test_setting_jobs_flag(job_parser, ncores, monkeypatch): + monkeypatch.setattr(multiprocessing, 'cpu_count', lambda: ncores) + namespace = job_parser.parse_args(['-j', '24']) + expected = min(24, ncores) assert namespace.jobs == expected assert spack.config.get('config:build_jobs') == expected -def test_negative_integers_not_allowed_for_parallel_jobs(parser): +@pytest.mark.parametrize("ncores", [1, 2, 4, 8, 16, 32]) +def test_omitted_job_flag(job_parser, ncores, monkeypatch): + monkeypatch.setattr(multiprocessing, 'cpu_count', lambda: ncores) + namespace = job_parser.parse_args([]) + assert namespace.jobs == min(ncores, 16) + assert spack.config.get('config:build_jobs') is None + + +def test_negative_integers_not_allowed_for_parallel_jobs(job_parser): with pytest.raises(ValueError) as exc_info: - parser.parse_args(['-j', '-2']) + job_parser.parse_args(['-j', '-2']) assert 'expected a positive integer' in str(exc_info.value) diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index 10561601be..9384eb5b47 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -1068,3 +1068,24 @@ def test_single_file_scope_cache_clearing(env_yaml): after = scope.get_section('config') assert after assert before == after + + +@pytest.mark.regression('22611') +def test_internal_config_scope_cache_clearing(): + """ + An InternalConfigScope object is constructed from data that is already + in memory, therefore it doesn't have any cache to clear. Here we ensure + that calling the clear method is consistent with that.. 
+ """ + data = { + 'config': { + 'build_jobs': 10 + } + } + internal_scope = spack.config.InternalConfigScope('internal', data) + # Ensure that the initial object is properly set + assert internal_scope.sections['config'] == data + # Call the clear method + internal_scope.clear() + # Check that this didn't affect the scope object + assert internal_scope.sections['config'] == data -- cgit v1.2.3-60-g2f50 From cbd55332e37ccc21bc2399bb638cfe87f9d897b2 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 30 Mar 2021 13:41:34 +0200 Subject: Bootstrap: add _builtin config scope (#22610) (cherry picked from commit a37c916dff5a5c6e72f939433931ab69dfd731bd) --- lib/spack/spack/bootstrap.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/bootstrap.py b/lib/spack/spack/bootstrap.py index 3d51ca931e..ff4ddedef5 100644 --- a/lib/spack/spack/bootstrap.py +++ b/lib/spack/spack/bootstrap.py @@ -196,7 +196,10 @@ def get_executable(exe, spec=None, install=False): def _bootstrap_config_scopes(): - config_scopes = [] + tty.debug('[BOOTSTRAP CONFIG SCOPE] name=_builtin') + config_scopes = [ + spack.config.InternalConfigScope('_builtin', spack.config.config_defaults) + ] for name, path in spack.config.configuration_paths: platform = spack.architecture.platform().name platform_scope = spack.config.ConfigScope( @@ -204,7 +207,7 @@ def _bootstrap_config_scopes(): ) generic_scope = spack.config.ConfigScope(name, path) config_scopes.extend([generic_scope, platform_scope]) - msg = '[BOOSTRAP CONFIG SCOPE] name={0}, path={1}' + msg = '[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}' tty.debug(msg.format(generic_scope.name, generic_scope.path)) tty.debug(msg.format(platform_scope.name, platform_scope.path)) return config_scopes -- cgit v1.2.3-60-g2f50 From 43cea1b35444f596d75aaad1a1a0bf02a2ba347a Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 30 Mar 2021 17:23:32 +0200 Subject: Bootstrapping: swap store before configuration (#22631) fixes #22294 A combination of the swapping order for global variables and the fact that most of them are lazily evaluated resulted in custom install tree not being taken into account if clingo had to be bootstrapped. This commit fixes that particular issue, but a broader refactor may be needed to ensure that similar situations won't affect us in the future. 
--- lib/spack/spack/bootstrap.py | 12 ++++++------ lib/spack/spack/store.py | 13 +++++++++++++ lib/spack/spack/test/bootstrap.py | 26 ++++++++++++++++++++++++++ 3 files changed, 45 insertions(+), 6 deletions(-) create mode 100644 lib/spack/spack/test/bootstrap.py diff --git a/lib/spack/spack/bootstrap.py b/lib/spack/spack/bootstrap.py index ff4ddedef5..ce9b720163 100644 --- a/lib/spack/spack/bootstrap.py +++ b/lib/spack/spack/bootstrap.py @@ -216,12 +216,12 @@ def _bootstrap_config_scopes(): @contextlib.contextmanager def ensure_bootstrap_configuration(): with spack.architecture.use_platform(spack.architecture.real_platform()): - # Default configuration scopes excluding command line and builtin - # but accounting for platform specific scopes - config_scopes = _bootstrap_config_scopes() - with spack.config.use_configuration(*config_scopes): - with spack.repo.use_repositories(spack.paths.packages_path): - with spack.store.use_store(spack.paths.user_bootstrap_store): + with spack.repo.use_repositories(spack.paths.packages_path): + with spack.store.use_store(spack.paths.user_bootstrap_store): + # Default configuration scopes excluding command line + # and builtin but accounting for platform specific scopes + config_scopes = _bootstrap_config_scopes() + with spack.config.use_configuration(*config_scopes): with spack_python_interpreter(): yield diff --git a/lib/spack/spack/store.py b/lib/spack/spack/store.py index 7b77768a37..6dbd2befb9 100644 --- a/lib/spack/spack/store.py +++ b/lib/spack/spack/store.py @@ -216,6 +216,19 @@ db = llnl.util.lang.LazyReference(_store_db) layout = llnl.util.lang.LazyReference(_store_layout) +def reinitialize(): + """Restore globals to the same state they would have at start-up""" + global store + global root, unpadded_root, db, layout + + store = llnl.util.lang.Singleton(_store) + + root = llnl.util.lang.LazyReference(_store_root) + unpadded_root = llnl.util.lang.LazyReference(_store_unpadded_root) + db = llnl.util.lang.LazyReference(_store_db) + layout = llnl.util.lang.LazyReference(_store_layout) + + def retrieve_upstream_dbs(): other_spack_instances = spack.config.get('upstreams', {}) diff --git a/lib/spack/spack/test/bootstrap.py b/lib/spack/spack/test/bootstrap.py new file mode 100644 index 0000000000..56083b327f --- /dev/null +++ b/lib/spack/spack/test/bootstrap.py @@ -0,0 +1,26 @@ +# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import pytest + +import spack.bootstrap +import spack.store + + +@pytest.mark.regression('22294') +def test_store_is_restored_correctly_after_bootstrap(mutable_config, tmpdir): + # Prepare a custom store path. This should be in a writeable location + # since Spack needs to initialize the DB. + user_path = str(tmpdir.join('store')) + # Reassign global variables in spack.store to the value + # they would have at Spack startup. + spack.store.reinitialize() + # Set the custom user path + spack.config.set('config:install_tree:root', user_path) + + # Test that within the context manager we use the bootstrap store + # and that outside we restore the correct location + with spack.bootstrap.ensure_bootstrap_configuration(): + assert str(spack.store.root) == spack.paths.user_bootstrap_store + assert str(spack.store.root) == user_path -- cgit v1.2.3-60-g2f50 From 2496c7b514a500da032218960cf8b8e50cc7d29d Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Tue, 6 Apr 2021 00:13:54 -0500 Subject: Remove erroneous warnings about quotes for from_source_file (#22767) --- lib/spack/spack/util/environment.py | 4 +++- lib/spack/spack/util/executable.py | 24 +++++++++++++++--------- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py index 53e0da32d7..c4e7ea3260 100644 --- a/lib/spack/spack/util/environment.py +++ b/lib/spack/spack/util/environment.py @@ -942,7 +942,9 @@ def environment_after_sourcing_files(*files, **kwargs): source_file, suppress_output, concatenate_on_success, dump_environment, ]) - output = shell(source_file_arguments, output=str, env=environment) + output = shell( + source_file_arguments, output=str, env=environment, ignore_quotes=True + ) environment = json.loads(output) # If we're in python2, convert to str objects instead of unicode diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py index fb192d6885..c01ae1b61b 100644 --- a/lib/spack/spack/util/executable.py +++ b/lib/spack/spack/util/executable.py @@ -92,6 +92,8 @@ class Executable(object): ignore_errors (int or list): A list of error codes to ignore. If these codes are returned, this process will not raise an exception even if ``fail_on_error`` is set to ``True`` + ignore_quotes (bool): If False, warn users that quotes are not needed + as Spack does not use a shell. Defaults to False. input: Where to read stdin from output: Where to send stdout error: Where to send stderr @@ -140,6 +142,7 @@ class Executable(object): fail_on_error = kwargs.pop('fail_on_error', True) ignore_errors = kwargs.pop('ignore_errors', ()) + ignore_quotes = kwargs.pop('ignore_quotes', False) # If they just want to ignore one error code, make it a tuple. if isinstance(ignore_errors, int): @@ -164,15 +167,18 @@ class Executable(object): estream, close_estream = streamify(error, 'w') istream, close_istream = streamify(input, 'r') - quoted_args = [arg for arg in args if re.search(r'^"|^\'|"$|\'$', arg)] - if quoted_args: - tty.warn( - "Quotes in command arguments can confuse scripts like" - " configure.", - "The following arguments may cause problems when executed:", - str("\n".join([" " + arg for arg in quoted_args])), - "Quotes aren't needed because spack doesn't use a shell.", - "Consider removing them") + if not ignore_quotes: + quoted_args = [arg for arg in args if re.search(r'^"|^\'|"$|\'$', arg)] + if quoted_args: + tty.warn( + "Quotes in command arguments can confuse scripts like" + " configure.", + "The following arguments may cause problems when executed:", + str("\n".join([" " + arg for arg in quoted_args])), + "Quotes aren't needed because spack doesn't use a shell. " + "Consider removing them.", + "If multiple levels of quotation are required, use " + "`ignore_quotes=True`.") cmd = self.exe + list(args) -- cgit v1.2.3-60-g2f50 From 5546b22c70585c956ad9fd88f68d8139ff966ded Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Tue, 23 Feb 2021 11:45:50 -0800 Subject: "spack build-env" searches env for relevant spec (#21642) If you install packages using spack install in an environment with complex spec constraints, and the install fails, you may want to test out the build using spack build-env; one issue (particularly if you use concretize: together) is that it may be hard to pass the appropriate spec that matches what the environment is attempting to install. 
This updates the build-env command to default to pulling a matching spec from the environment rather than concretizing what the user provides on the command line independently. This makes a similar change to spack cd. If the user-provided spec matches multiple specs in the environment, then these commands will now report an error and display all matching specs (to help the user specify). Co-authored-by: Gregory Becker --- lib/spack/spack/cmd/__init__.py | 13 ++++++ lib/spack/spack/cmd/common/env_utility.py | 4 +- lib/spack/spack/cmd/location.py | 5 +-- lib/spack/spack/environment.py | 61 ++++++++++++++++++++++++++++ lib/spack/spack/test/cmd/common/arguments.py | 38 +++++++++++++++++ 5 files changed, 117 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index 130abea4cc..d728325699 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -181,6 +181,19 @@ def parse_specs(args, **kwargs): raise spack.error.SpackError(msg) +def matching_spec_from_env(spec): + """ + Returns a concrete spec, matching what is available in the environment. + If no matching spec is found in the environment (or if no environment is + active), this will return the given spec but concretized. + """ + env = spack.environment.get_env({}, cmd_name) + if env: + return env.matching_spec(spec) or spec.concretized() + else: + return spec.concretized() + + def elide_list(line_list, max_num=10): """Takes a long list and limits it to a smaller number of elements, replacing intervening elements with '...'. For example:: diff --git a/lib/spack/spack/cmd/common/env_utility.py b/lib/spack/spack/cmd/common/env_utility.py index e3f32737b4..a16cc3ba0d 100644 --- a/lib/spack/spack/cmd/common/env_utility.py +++ b/lib/spack/spack/cmd/common/env_utility.py @@ -53,11 +53,13 @@ def emulate_env_utility(cmd_name, context, args): spec = args.spec[0] cmd = args.spec[1:] - specs = spack.cmd.parse_specs(spec, concretize=True) + specs = spack.cmd.parse_specs(spec, concretize=False) if len(specs) > 1: tty.die("spack %s only takes one spec." % cmd_name) spec = specs[0] + spec = spack.cmd.matching_spec_from_env(spec) + build_environment.setup_package(spec.package, args.dirty, context) if args.dump: diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py index cced5d29a0..4981056265 100644 --- a/lib/spack/spack/cmd/location.py +++ b/lib/spack/spack/cmd/location.py @@ -98,9 +98,8 @@ def location(parser, args): print(spack.repo.path.dirname_for_package_name(spec.name)) else: - # These versions need concretized specs. - spec.concretize() - pkg = spack.repo.get(spec) + spec = spack.cmd.matching_spec_from_env(spec) + pkg = spec.package if args.stage_dir: print(pkg.stage.path) diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index 868ebb6ac5..7f7625f2e6 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -1505,6 +1505,67 @@ class Environment(object): for s, h in zip(self.concretized_user_specs, self.concretized_order): yield (s, self.specs_by_hash[h]) + def matching_spec(self, spec): + """ + Given a spec (likely not concretized), find a matching concretized + spec in the environment. + + The matching spec does not have to be installed in the environment, + but must be concrete (specs added with `spack add` without an + intervening `spack concretize` will not be matched). 
+ + If there is a single root spec that matches the provided spec or a + single dependency spec that matches the provided spec, then the + concretized instance of that spec will be returned. + + If multiple root specs match the provided spec, or no root specs match + and multiple dependency specs match, then this raises an error + and reports all matching specs. + """ + # Root specs will be keyed by concrete spec, value abstract + # Dependency-only specs will have value None + matches = {} + + for user_spec, concretized_user_spec in self.concretized_specs(): + if concretized_user_spec.satisfies(spec): + matches[concretized_user_spec] = user_spec + for dep_spec in concretized_user_spec.traverse(root=False): + if dep_spec.satisfies(spec): + # Don't overwrite the abstract spec if present + # If not present already, set to None + matches[dep_spec] = matches.get(dep_spec, None) + + if not matches: + return None + elif len(matches) == 1: + return list(matches.keys())[0] + + root_matches = dict((concrete, abstract) + for concrete, abstract in matches.items() + if abstract) + + if len(root_matches) == 1: + return root_matches[0][1] + + # More than one spec matched, and either multiple roots matched or + # none of the matches were roots + # If multiple root specs match, it is assumed that the abstract + # spec will most-succinctly summarize the difference between them + # (and the user can enter one of these to disambiguate) + match_strings = [] + fmt_str = '{hash:7} ' + spack.spec.default_format + for concrete, abstract in matches.items(): + if abstract: + s = 'Root spec %s\n %s' % (abstract, concrete.format(fmt_str)) + else: + s = 'Dependency spec\n %s' % concrete.format(fmt_str) + match_strings.append(s) + matches_str = '\n'.join(match_strings) + + msg = ("{0} matches multiple specs in the environment {1}: \n" + "{2}".format(str(spec), self.name, matches_str)) + raise SpackEnvironmentError(msg) + def removed_specs(self): """Tuples of (user spec, concrete spec) for all specs that will be removed on nexg concretize.""" diff --git a/lib/spack/spack/test/cmd/common/arguments.py b/lib/spack/spack/test/cmd/common/arguments.py index 6646144a81..4f6cd1a527 100644 --- a/lib/spack/spack/test/cmd/common/arguments.py +++ b/lib/spack/spack/test/cmd/common/arguments.py @@ -12,6 +12,7 @@ import pytest import spack.cmd import spack.cmd.common.arguments as arguments import spack.config +import spack.environment as ev @pytest.fixture() @@ -65,3 +66,40 @@ def test_parse_spec_flags_with_spaces( assert all(x not in s.variants for x in unexpected_variants) assert all(x in s.variants for x in expected_variants) + + +@pytest.mark.usefixtures('config') +def test_match_spec_env(mock_packages, mutable_mock_env_path): + """ + Concretize a spec with non-default options in an environment. Make + sure that when we ask for a matching spec when the environment is + active that we get the instance concretized in the environment. + """ + # Initial sanity check: we are planning on choosing a non-default + # value, so make sure that is in fact not the default. 
+ check_defaults = spack.cmd.parse_specs(['a'], concretize=True)[0] + assert not check_defaults.satisfies('foobar=baz') + + e = ev.create('test') + e.add('a foobar=baz') + e.concretize() + with e: + env_spec = spack.cmd.matching_spec_from_env( + spack.cmd.parse_specs(['a'])[0]) + assert env_spec.satisfies('foobar=baz') + assert env_spec.concrete + + +@pytest.mark.usefixtures('config') +def test_multiple_env_match_raises_error(mock_packages, mutable_mock_env_path): + e = ev.create('test') + e.add('a foobar=baz') + e.add('a foobar=fee') + e.concretize() + with e: + with pytest.raises( + spack.environment.SpackEnvironmentError) as exc_info: + + spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(['a'])[0]) + + assert 'matches multiple specs' in exc_info.value.message -- cgit v1.2.3-60-g2f50 From 2655e21bd00ff10ea20b447eb067c6411e52b3e8 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Sun, 11 Apr 2021 09:01:09 +0200 Subject: ASP-based solver: assign OS correctly with inheritance from parent (#22896) fixes #22871 When in presence of multiple choices for the operating system we were lacking a rule to derive the node OS if it was inherited. --- lib/spack/spack/solver/concretize.lp | 2 ++ lib/spack/spack/test/concretize.py | 16 ++++++++++++++++ lib/spack/spack/test/data/config/compilers.yaml | 10 ++++++++++ 3 files changed, 28 insertions(+) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index ee69e9798f..3d250ac821 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -472,6 +472,8 @@ node_os_inherit(Dependency, OS) not node_os_set(Dependency). node_os_inherit(Package) :- node_os_inherit(Package, _). +node_os(Package, OS) :- node_os_inherit(Package, OS). + % fall back to default if not set or inherited node_os(Package, OS) :- node(Package), diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 437edcb99f..56e111e220 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -1155,3 +1155,19 @@ class TestConcretize(object): assert '~foo' in s['external-non-default-variant'] assert '~bar' in s['external-non-default-variant'] assert s['external-non-default-variant'].external + + @pytest.mark.regression('22871') + @pytest.mark.parametrize('spec_str,expected_os', [ + ('mpileaks', 'os=debian6'), + # To trigger the bug in 22871 we need to have the same compiler + # spec available on both operating systems + ('mpileaks%gcc@4.5.0 platform=test os=debian6', 'os=debian6'), + ('mpileaks%gcc@4.5.0 platform=test os=redhat6', 'os=redhat6') + ]) + def test_os_selection_when_multiple_choices_are_possible( + self, spec_str, expected_os + ): + s = Spec(spec_str).concretized() + + for node in s.traverse(): + assert node.satisfies(expected_os) diff --git a/lib/spack/spack/test/data/config/compilers.yaml b/lib/spack/spack/test/data/config/compilers.yaml index 641331dc9f..e0b0464976 100644 --- a/lib/spack/spack/test/data/config/compilers.yaml +++ b/lib/spack/spack/test/data/config/compilers.yaml @@ -19,6 +19,16 @@ compilers: fc: None modules: 'None' target: x86_64 +- compiler: + spec: gcc@4.5.0 + operating_system: redhat6 + paths: + cc: /path/to/gcc + cxx: /path/to/g++ + f77: None + fc: None + modules: 'None' + target: x86_64 - compiler: spec: clang@3.3 operating_system: CNL -- cgit v1.2.3-60-g2f50 From f1f94ad31abbcbff14fdbb200e86b81cdf339c82 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Mon, 12 Apr 2021 02:19:29 -0700 Subject: Externals with merged 
prefixes (#22653) We remove system paths from search variables like PATH and from -L options because they may contain many packages and could interfere with Spack-built packages. External packages may be installed to prefixes that are not actually system paths but are still "merged" in the sense that many other packages are installed there. To avoid conflicts, this PR places all external packages at the end of search paths. --- lib/spack/spack/build_environment.py | 62 ++++++++++++++++++++++--------- lib/spack/spack/test/build_environment.py | 40 ++++++++++++++++++++ 2 files changed, 85 insertions(+), 17 deletions(-) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 48ce594a4b..3cf02dcbb4 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -302,6 +302,19 @@ def set_compiler_environment_variables(pkg, env): return env +def _place_externals_last(spec_container): + """ + For a (possibly unordered) container of specs, return an ordered list + where all external specs are at the end of the list. External packages + may be installed in merged prefixes with other packages, and so + they should be deprioritized for any search order (i.e. in PATH, or + for a set of -L entries in a compiler invocation). + """ + first = list(x for x in spec_container if not x.external) + second = list(x for x in spec_container if x.external) + return first + second + + def set_build_environment_variables(pkg, env, dirty): """Ensure a clean install environment when we build packages. @@ -319,6 +332,29 @@ def set_build_environment_variables(pkg, env, dirty): link_deps = set(pkg.spec.traverse(root=False, deptype=('link'))) build_link_deps = build_deps | link_deps rpath_deps = get_rpath_deps(pkg) + # This includes all build dependencies and any other dependencies that + # should be added to PATH (e.g. supporting executables run by build + # dependencies) + build_and_supporting_deps = set() + for build_dep in build_deps: + build_and_supporting_deps.update(build_dep.traverse(deptype='run')) + + # Establish an arbitrary but fixed ordering of specs so that resulting + # environment variable values are stable + def _order(specs): + return sorted(specs, key=lambda x: x.name) + + # External packages may be installed in a prefix which contains many other + # package installs. To avoid having those installations override + # Spack-installed packages, they are placed at the end of search paths. + # System prefixes are removed entirely later on since they are already + # searched. 
+ build_deps = _place_externals_last(_order(build_deps)) + link_deps = _place_externals_last(_order(link_deps)) + build_link_deps = _place_externals_last(_order(build_link_deps)) + rpath_deps = _place_externals_last(_order(rpath_deps)) + build_and_supporting_deps = _place_externals_last( + _order(build_and_supporting_deps)) link_dirs = [] include_dirs = [] @@ -365,21 +401,10 @@ def set_build_environment_variables(pkg, env, dirty): env.set(SPACK_INCLUDE_DIRS, ':'.join(include_dirs)) env.set(SPACK_RPATH_DIRS, ':'.join(rpath_dirs)) - build_prefixes = [dep.prefix for dep in build_deps] - build_link_prefixes = [dep.prefix for dep in build_link_deps] - - # add run-time dependencies of direct build-time dependencies: - for build_dep in build_deps: - for run_dep in build_dep.traverse(deptype='run'): - build_prefixes.append(run_dep.prefix) - - # Filter out system paths: ['/', '/usr', '/usr/local'] - # These paths can be introduced into the build when an external package - # is added as a dependency. The problem with these paths is that they often - # contain hundreds of other packages installed in the same directory. - # If these paths come first, they can overshadow Spack installations. - build_prefixes = filter_system_paths(build_prefixes) - build_link_prefixes = filter_system_paths(build_link_prefixes) + build_and_supporting_prefixes = filter_system_paths( + x.prefix for x in build_and_supporting_deps) + build_link_prefixes = filter_system_paths( + x.prefix for x in build_link_deps) # Add dependencies to CMAKE_PREFIX_PATH env.set_path('CMAKE_PREFIX_PATH', build_link_prefixes) @@ -394,7 +419,10 @@ def set_build_environment_variables(pkg, env, dirty): env.set('SPACK_COMPILER_EXTRA_RPATHS', extra_rpaths) # Add bin directories from dependencies to the PATH for the build. - for prefix in build_prefixes: + # These directories are added to the beginning of the search path, and in + # the order given by 'build_and_supporting_prefixes' (the iteration order + # is reversed because each entry is prepended) + for prefix in reversed(build_and_supporting_prefixes): for dirname in ['bin', 'bin64']: bin_dir = os.path.join(prefix, dirname) if os.path.isdir(bin_dir): @@ -439,7 +467,7 @@ def set_build_environment_variables(pkg, env, dirty): env.set(SPACK_CCACHE_BINARY, ccache) # Add any pkgconfig directories to PKG_CONFIG_PATH - for prefix in build_link_prefixes: + for prefix in reversed(build_link_prefixes): for directory in ('lib', 'lib64', 'share'): pcdir = os.path.join(prefix, directory, 'pkgconfig') if os.path.isdir(pcdir): diff --git a/lib/spack/spack/test/build_environment.py b/lib/spack/spack/test/build_environment.py index a7a55af0ca..b374a7c8ce 100644 --- a/lib/spack/spack/test/build_environment.py +++ b/lib/spack/spack/test/build_environment.py @@ -11,6 +11,7 @@ import pytest import spack.build_environment import spack.config import spack.spec +import spack.util.spack_yaml as syaml from spack.paths import build_env_path from spack.build_environment import dso_suffix, _static_to_shared_library from spack.util.executable import Executable @@ -299,6 +300,45 @@ def test_set_build_environment_variables( delattr(dep_pkg, 'libs') +def test_external_prefixes_last(mutable_config, mock_packages, working_env, + monkeypatch): + # Sanity check: under normal circumstances paths associated with + # dt-diamond-left would appear first. We'll mark it as external in + # the test to check if the associated paths are placed last. 
+ assert 'dt-diamond-left' < 'dt-diamond-right' + + cfg_data = syaml.load_config("""\ +dt-diamond-left: + externals: + - spec: dt-diamond-left@1.0 + prefix: /fake/path1 + buildable: false +""") + spack.config.set("packages", cfg_data) + top = spack.spec.Spec('dt-diamond').concretized() + + def _trust_me_its_a_dir(path): + return True + monkeypatch.setattr( + os.path, 'isdir', _trust_me_its_a_dir + ) + + env_mods = EnvironmentModifications() + spack.build_environment.set_build_environment_variables( + top.package, env_mods, False) + + env_mods.apply_modifications() + link_dir_var = os.environ['SPACK_LINK_DIRS'] + link_dirs = link_dir_var.split(':') + external_lib_paths = set(['/fake/path1/lib', '/fake/path1/lib64']) + # The external lib paths should be the last two entries of the list and + # should not appear anywhere before the last two entries + assert (set(os.path.normpath(x) for x in link_dirs[-2:]) == + external_lib_paths) + assert not (set(os.path.normpath(x) for x in link_dirs[:-2]) & + external_lib_paths) + + def test_parallel_false_is_not_propagating(config, mock_packages): class AttributeHolder(object): pass -- cgit v1.2.3-60-g2f50 From 8a7bfe97c31b5c5b16dc5a79dba6adbb91c618ea Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 21 Apr 2021 10:02:10 +0200 Subject: ASP-based solver: suppress warnings when constructing facts (#23090) fixes #22786 Trying to get optimization flags for a specific target from a compiler may trigger warnings. In the context of constructing facts for the ASP-based solver we don't want to show these warnings to the user, so here we simply ignore them. --- lib/spack/spack/solver/asp.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 9ccbbaf9b1..30de581cbd 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -12,6 +12,7 @@ import pprint import sys import time import types +import warnings from six import string_types import archspec.cpu @@ -1023,7 +1024,9 @@ class SpackSolverSetup(object): for target in targets: try: - target.optimization_flags(compiler_name, compiler_version) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + target.optimization_flags(compiler_name, compiler_version) supported.append(target) except archspec.cpu.UnsupportedMicroarchitecture: continue -- cgit v1.2.3-60-g2f50 From 0d173bb32bebd2eb6b907268b6dfdf4982cc2f1b Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 4 May 2021 07:26:48 +0200 Subject: Use Python's built-in machinery to import compilers (#23290) --- lib/spack/spack/compilers/__init__.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index 3c1f9ffa44..85e0275a29 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -22,11 +22,10 @@ import spack.error import spack.spec import spack.config import spack.architecture -import spack.util.imp as simp + from spack.util.environment import get_path from spack.util.naming import mod_to_class -_imported_compilers_module = 'spack.compilers' _path_instance_vars = ['cc', 'cxx', 'f77', 'fc'] _flags_instance_vars = ['cflags', 'cppflags', 'cxxflags', 'fflags'] _other_instance_vars = ['modules', 'operating_system', 'environment', @@ -470,17 +469,17 @@ def get_compiler_duplicates(compiler_spec, arch_spec): @llnl.util.lang.memoized def class_for_compiler_name(compiler_name): """Given a 
compiler module name, get the corresponding Compiler class.""" - assert(supported(compiler_name)) + assert supported(compiler_name) # Hack to be able to call the compiler `apple-clang` while still # using a valid python name for the module - module_name = compiler_name + submodule_name = compiler_name if compiler_name == 'apple-clang': - module_name = compiler_name.replace('-', '_') + submodule_name = compiler_name.replace('-', '_') - file_path = os.path.join(spack.paths.compilers_path, module_name + ".py") - compiler_mod = simp.load_source(_imported_compilers_module, file_path) - cls = getattr(compiler_mod, mod_to_class(compiler_name)) + module_name = '.'.join(['spack', 'compilers', submodule_name]) + module_obj = __import__(module_name, fromlist=[None]) + cls = getattr(module_obj, mod_to_class(compiler_name)) # make a note of the name in the module so we can get to it easily. cls.name = compiler_name -- cgit v1.2.3-60-g2f50 From 4a7581eda346389cf11bb427b488d71340b5527b Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 29 Mar 2021 17:31:24 +0200 Subject: Add "spack [cd|location] --source-dir" (#22321) --- lib/spack/spack/cmd/location.py | 120 ++++++++++++++++++++--------------- lib/spack/spack/test/cmd/location.py | 16 +++-- share/spack/spack-completion.bash | 4 +- 3 files changed, 82 insertions(+), 58 deletions(-) diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py index 4981056265..d26587c8c4 100644 --- a/lib/spack/spack/cmd/location.py +++ b/lib/spack/spack/cmd/location.py @@ -47,9 +47,13 @@ def setup_parser(subparser): directories.add_argument( '-S', '--stages', action='store_true', help="top level stage directory") + directories.add_argument( + '--source-dir', action='store_true', + help="source directory for a spec " + "(requires it to be staged first)") directories.add_argument( '-b', '--build-dir', action='store_true', - help="checked out or expanded source directory for a spec " + help="build directory for a spec " "(requires it to be staged first)") directories.add_argument( '-e', '--env', action='store', @@ -61,64 +65,78 @@ def setup_parser(subparser): def location(parser, args): if args.module_dir: print(spack.paths.module_path) + return - elif args.spack_root: + if args.spack_root: print(spack.paths.prefix) + return - elif args.env: + if args.env: path = spack.environment.root(args.env) if not os.path.isdir(path): tty.die("no such environment: '%s'" % args.env) print(path) + return - elif args.packages: + if args.packages: print(spack.repo.path.first_repo().root) + return - elif args.stages: + if args.stages: print(spack.stage.get_stage_root()) - - else: - specs = spack.cmd.parse_specs(args.spec) - if not specs: - tty.die("You must supply a spec.") - if len(specs) != 1: - tty.die("Too many specs. Supply only one.") - - if args.install_dir: - # install_dir command matches against installed specs. - env = ev.get_env(args, 'location') - spec = spack.cmd.disambiguate_spec(specs[0], env) - print(spec.prefix) - - else: - spec = specs[0] - - if args.package_dir: - # This one just needs the spec name. - print(spack.repo.path.dirname_for_package_name(spec.name)) - - else: - spec = spack.cmd.matching_spec_from_env(spec) - pkg = spec.package - - if args.stage_dir: - print(pkg.stage.path) - - else: # args.build_dir is the default. - if not pkg.stage.expanded: - tty.die("Build directory does not exist yet. 
" - "Run this to create it:", - "spack stage " + " ".join(args.spec)) - - # Out of source builds have build_directory defined - if hasattr(pkg, 'build_directory'): - # build_directory can be either absolute or relative - # to the stage path in either case os.path.join makes it - # absolute - print(os.path.normpath(os.path.join( - pkg.stage.path, - pkg.build_directory - ))) - else: - # Otherwise assume in-source builds - return print(pkg.stage.source_path) + return + + specs = spack.cmd.parse_specs(args.spec) + + if not specs: + tty.die("You must supply a spec.") + + if len(specs) != 1: + tty.die("Too many specs. Supply only one.") + + # install_dir command matches against installed specs. + if args.install_dir: + env = ev.get_env(args, 'location') + spec = spack.cmd.disambiguate_spec(specs[0], env) + print(spec.prefix) + return + + spec = specs[0] + + # Package dir just needs the spec name + if args.package_dir: + print(spack.repo.path.dirname_for_package_name(spec.name)) + return + + # Either concretize or filter from already concretized environment + spec = spack.cmd.matching_spec_from_env(spec) + pkg = spec.package + + if args.stage_dir: + print(pkg.stage.path) + return + + if args.build_dir: + # Out of source builds have build_directory defined + if hasattr(pkg, 'build_directory'): + # build_directory can be either absolute or relative to the stage path + # in either case os.path.join makes it absolute + print(os.path.normpath(os.path.join( + pkg.stage.path, + pkg.build_directory + ))) + return + + # Otherwise assume in-source builds + print(pkg.stage.source_path) + return + + # source and build dir remain, they require the spec to be staged + if not pkg.stage.expanded: + tty.die("Source directory does not exist yet. " + "Run this to create it:", + "spack stage " + " ".join(args.spec)) + + if args.source_dir: + print(pkg.stage.source_path) + return diff --git a/lib/spack/spack/test/cmd/location.py b/lib/spack/spack/test/cmd/location.py index 3977cba3e5..9c47c2253a 100644 --- a/lib/spack/spack/test/cmd/location.py +++ b/lib/spack/spack/test/cmd/location.py @@ -52,18 +52,24 @@ def test_location_build_dir(mock_spec): assert location('--build-dir', spec.name).strip() == pkg.stage.source_path -def test_location_build_dir_missing(): - """Tests spack location --build-dir with a missing build directory.""" +def test_location_source_dir(mock_spec): + """Tests spack location --source-dir.""" + spec, pkg = mock_spec + assert location('--source-dir', spec.name).strip() == pkg.stage.source_path + + +def test_location_source_dir_missing(): + """Tests spack location --source-dir with a missing source directory.""" spec = 'mpileaks' prefix = "==> Error: " - expected = "%sBuild directory does not exist yet. Run this to create it:"\ + expected = "%sSource directory does not exist yet. 
Run this to create it:"\ "%s spack stage %s" % (prefix, os.linesep, spec) - out = location('--build-dir', spec, fail_on_error=False).strip() + out = location('--source-dir', spec, fail_on_error=False).strip() assert out == expected @pytest.mark.parametrize('options', [([]), - (['--build-dir', 'mpileaks']), + (['--source-dir', 'mpileaks']), (['--env', 'missing-env']), (['spec1', 'spec2'])]) def test_location_cmd_error(options): diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 46e92a0bd5..64dfa37d6b 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -440,7 +440,7 @@ _spack_buildcache_update_index() { _spack_cd() { if $list_options then - SPACK_COMPREPLY="-h --help -m --module-dir -r --spack-root -i --install-dir -p --package-dir -P --packages -s --stage-dir -S --stages -b --build-dir -e --env" + SPACK_COMPREPLY="-h --help -m --module-dir -r --spack-root -i --install-dir -p --package-dir -P --packages -s --stage-dir -S --stages --source-dir -b --build-dir -e --env" else _all_packages fi @@ -1064,7 +1064,7 @@ _spack_load() { _spack_location() { if $list_options then - SPACK_COMPREPLY="-h --help -m --module-dir -r --spack-root -i --install-dir -p --package-dir -P --packages -s --stage-dir -S --stages -b --build-dir -e --env" + SPACK_COMPREPLY="-h --help -m --module-dir -r --spack-root -i --install-dir -p --package-dir -P --packages -s --stage-dir -S --stages --source-dir -b --build-dir -e --env" else _all_packages fi -- cgit v1.2.3-60-g2f50 From fb27c7ad0c5542c10de98de6c825fbd0c7c8a88f Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 6 Apr 2021 10:17:58 +0200 Subject: spack location: fix usage without args (#22755) --- lib/spack/spack/cmd/location.py | 7 +++---- lib/spack/spack/test/cmd/location.py | 2 ++ 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py index d26587c8c4..3b72de978b 100644 --- a/lib/spack/spack/cmd/location.py +++ b/lib/spack/spack/cmd/location.py @@ -131,12 +131,11 @@ def location(parser, args): print(pkg.stage.source_path) return - # source and build dir remain, they require the spec to be staged + # source dir remains, which requires the spec to be staged if not pkg.stage.expanded: tty.die("Source directory does not exist yet. " "Run this to create it:", "spack stage " + " ".join(args.spec)) - if args.source_dir: - print(pkg.stage.source_path) - return + # Default to source dir. + print(pkg.stage.source_path) diff --git a/lib/spack/spack/test/cmd/location.py b/lib/spack/spack/test/cmd/location.py index 9c47c2253a..2cefbd558f 100644 --- a/lib/spack/spack/test/cmd/location.py +++ b/lib/spack/spack/test/cmd/location.py @@ -52,10 +52,12 @@ def test_location_build_dir(mock_spec): assert location('--build-dir', spec.name).strip() == pkg.stage.source_path +@pytest.mark.regression('22738') def test_location_source_dir(mock_spec): """Tests spack location --source-dir.""" spec, pkg = mock_spec assert location('--source-dir', spec.name).strip() == pkg.stage.source_path + assert location(spec.name).strip() == pkg.stage.source_path def test_location_source_dir_missing(): -- cgit v1.2.3-60-g2f50 From 13fed376f20f11221b83a806cc66e4ae82918c4d Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 28 Apr 2021 01:55:07 +0200 Subject: Import hooks using Python's built-in machinery (#23288) The function we coded in Spack to load Python modules with arbitrary names from a file seem to have issues with local imports. 
For loading hooks though it is unnecessary to use such functions, since we don't care to bind a custom name to a module nor we have to load it from an unknown location. This PR thus modifies spack.hook in the following ways: - Use __import__ instead of spack.util.imp.load_source (this addresses #20005) - Sync module docstring with all the hooks we have - Avoid using memoization in a module function - Marked with a leading underscore all the names that are supposed to stay local --- lib/spack/spack/hooks/__init__.py | 67 ++++++++++++++++++++++----------------- 1 file changed, 38 insertions(+), 29 deletions(-) diff --git a/lib/spack/spack/hooks/__init__.py b/lib/spack/spack/hooks/__init__.py index 5a39f242fd..5a5a04b49f 100644 --- a/lib/spack/spack/hooks/__init__.py +++ b/lib/spack/spack/hooks/__init__.py @@ -2,8 +2,8 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - """This package contains modules with hooks for various stages in the + Spack install process. You can add modules here and they'll be executed by package at various times during the package lifecycle. @@ -21,46 +21,55 @@ systems (e.g. modules, lmod, etc.) or to add other custom features. """ -import os.path - +import llnl.util.lang import spack.paths -import spack.util.imp as simp -from llnl.util.lang import memoized, list_modules - -@memoized -def all_hook_modules(): - modules = [] - for name in list_modules(spack.paths.hooks_path): - mod_name = __name__ + '.' + name - path = os.path.join(spack.paths.hooks_path, name) + ".py" - mod = simp.load_source(mod_name, path) - if name == 'write_install_manifest': - last_mod = mod - else: - modules.append(mod) - - # put `write_install_manifest` as the last hook to run - modules.append(last_mod) - return modules - - -class HookRunner(object): +class _HookRunner(object): + #: Stores all hooks on first call, shared among + #: all HookRunner objects + _hooks = None def __init__(self, hook_name): self.hook_name = hook_name + @classmethod + def _populate_hooks(cls): + # Lazily populate the list of hooks + cls._hooks = [] + relative_names = list(llnl.util.lang.list_modules( + spack.paths.hooks_path + )) + + # We want this hook to be the last registered + relative_names.sort(key=lambda x: x == 'write_install_manifest') + assert relative_names[-1] == 'write_install_manifest' + + for name in relative_names: + module_name = __name__ + '.' + name + # When importing a module from a package, __import__('A.B', ...) + # returns package A when 'fromlist' is empty. 
If fromlist is not + # empty it returns the submodule B instead + # See: https://stackoverflow.com/a/2725668/771663 + module_obj = __import__(module_name, fromlist=[None]) + cls._hooks.append((module_name, module_obj)) + + @property + def hooks(self): + if not self._hooks: + self._populate_hooks() + return self._hooks + def __call__(self, *args, **kwargs): - for module in all_hook_modules(): + for _, module in self.hooks: if hasattr(module, self.hook_name): hook = getattr(module, self.hook_name) if hasattr(hook, '__call__'): hook(*args, **kwargs) -pre_install = HookRunner('pre_install') -post_install = HookRunner('post_install') +pre_install = _HookRunner('pre_install') +post_install = _HookRunner('post_install') -pre_uninstall = HookRunner('pre_uninstall') -post_uninstall = HookRunner('post_uninstall') +pre_uninstall = _HookRunner('pre_uninstall') +post_uninstall = _HookRunner('post_uninstall') -- cgit v1.2.3-60-g2f50 From 30dd61264a69eb9385a07fa4e263f0e74719ed1f Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 6 May 2021 19:19:10 +0200 Subject: ASP-based solver: no intermediate package for concretizing together (#23307) The ASP-based solver can natively manage cases where more than one root spec is given, and is able to concretize all the roots together (ensuring one spec per package at most). Modifications: - [x] When concretising together an environment the ASP-based solver calls directly its `solve` method rather than constructing a temporary fake root package. --- lib/spack/spack/concretize.py | 18 ++++++++++++++++++ lib/spack/spack/test/cmd/env.py | 15 +++++++++++++++ 2 files changed, 33 insertions(+) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 7e180eb91e..7c00b4dd55 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -724,6 +724,24 @@ def concretize_specs_together(*abstract_specs): Returns: List of concretized specs """ + if spack.config.get('config:concretizer') == 'original': + return _concretize_specs_together_original(*abstract_specs, **kwargs) + return _concretize_specs_together_new(*abstract_specs, **kwargs) + + +def _concretize_specs_together_new(*abstract_specs, **kwargs): + import spack.solver.asp + result = spack.solver.asp.solve(abstract_specs) + + if not result.satisfiable: + result.print_cores() + tty.die("Unsatisfiable spec.") + + opt, i, answer = min(result.answers) + return [answer[s.name].copy() for s in abstract_specs] + + +def _concretize_specs_together_original(*abstract_specs, **kwargs): def make_concretization_repository(abstract_specs): """Returns the path to a temporary repository created to contain a fake package that depends on all of the abstract specs. 
diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 44b5caf5f9..f43ea4ff32 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -2369,3 +2369,18 @@ spack: e.clear() e.write() assert os.path.exists(str(spack_lock)) + + +@pytest.mark.regression('23440') +def test_custom_version_concretize_together(tmpdir): + # Custom versions should be permitted in specs when + # concretizing together + e = ev.create('custom_version') + e.concretization = 'together' + + # Concretize a first time using 'mpich' as the MPI provider + e.add('hdf5@myversion') + e.add('mpich') + e.concretize() + + assert any('hdf5@myversion' in spec for _, spec in e.concretized_specs()) -- cgit v1.2.3-60-g2f50 From 76c5a02125b31a7d50dab10a51d526c8594aefb6 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 21 Apr 2021 10:02:43 +0200 Subject: ASP-based solve: minimize compiler mismatches (#23016) fixes #22718 Instead of trying to maximize the number of matches (preferred behavior), try to minimize the number of mismatches (unwanted behavior). --- lib/spack/spack/concretize.py | 2 +- lib/spack/spack/solver/concretize.lp | 47 ++++++++++++------------------------ lib/spack/spack/test/concretize.py | 11 +++++++++ 3 files changed, 28 insertions(+), 32 deletions(-) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 7c00b4dd55..48210f0959 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -714,7 +714,7 @@ def _compiler_concretization_failure(compiler_spec, arch): raise UnavailableCompilerVersionError(compiler_spec, arch) -def concretize_specs_together(*abstract_specs): +def concretize_specs_together(*abstract_specs, **kwargs): """Given a number of specs as input, tries to concretize them together. Args: diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 3d250ac821..145c00ab67 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -597,36 +597,24 @@ node_compiler_version(Package, Compiler, Version) :- node_compiler_version_set(P not compiler_supports_os(Compiler, Version, OS), not allow_compiler(Compiler, Version). -% If the compiler is what was prescribed from command line etc. -% or is the same as a root node, there is a version match - -% Compiler prescribed in the root spec -node_compiler_version_match_pref(Package, Compiler, V) - :- node_compiler_set(Package, Compiler), - node_compiler_version(Package, Compiler, V), - not external(Package). - -% Compiler inherited from a parent node -node_compiler_version_match_pref(Dependency, Compiler, V) - :- depends_on(Package, Dependency), - node_compiler_version_match_pref(Package, Compiler, V), - node_compiler_version(Dependency, Compiler, V), - not node_compiler_set(Dependency, Compiler). - -% Compiler inherited from the root package -node_compiler_version_match_pref(Dependency, Compiler, V) - :- depends_on(Package, Dependency), - node_compiler_version(Package, Compiler, V), root(Package), - node_compiler_version(Dependency, Compiler, V), - not node_compiler_set(Dependency, Compiler). +% If a package and one of its dependencies don't have the +% same compiler there's a mismatch. +compiler_mismatch(Package, Dependency) + :- depends_on(Package, Dependency), + node_compiler_version(Package, Compiler1, _), + node_compiler_version(Dependency, Compiler2, _), + Compiler1 != Compiler2. 
-compiler_version_match(Package, 1) - :- node_compiler_version(Package, Compiler, V), - node_compiler_version_match_pref(Package, Compiler, V). +compiler_mismatch(Package, Dependency) + :- depends_on(Package, Dependency), + node_compiler_version(Package, Compiler, Version1), + node_compiler_version(Dependency, Compiler, Version2), + Version1 != Version2. #defined node_compiler_set/2. #defined node_compiler_version_set/3. #defined compiler_supports_os/3. +#defined compiler_version_match/2. #defined allow_compiler/2. % compilers weighted by preference according to packages.yaml @@ -766,12 +754,9 @@ root(Dependency, 1) :- not root(Dependency), node(Dependency). not root(Package) }. -% Try to maximize the number of compiler matches in the DAG, -% while minimizing the number of nodes. This is done because -% a maximization on the number of matches for compilers is highly -% correlated to a preference to have as many nodes as possible -#minimize{ 1@7,Package : node(Package) }. -#maximize{ Weight@7,Package : compiler_version_match(Package, Weight) }. +% Try to minimize the number of compiler mismatches in the DAG. +#minimize{ 0@7 : #true }. +#minimize{ 1@7,Package,Dependency : compiler_mismatch(Package, Dependency) }. % Choose more recent versions for nodes #minimize{ diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 56e111e220..6f87381e3f 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -1171,3 +1171,14 @@ class TestConcretize(object): for node in s.traverse(): assert node.satisfies(expected_os) + + @pytest.mark.regression('22718') + @pytest.mark.parametrize('spec_str,expected_compiler', [ + ('mpileaks', '%gcc@4.5.0'), + ('mpileaks ^mpich%clang@3.3', '%clang@3.3') + ]) + def test_compiler_is_unique(self, spec_str, expected_compiler): + s = Spec(spec_str).concretized() + + for node in s.traverse(): + assert node.satisfies(expected_compiler) -- cgit v1.2.3-60-g2f50 From 818664d55aa324430e770188f1116248f4966e68 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 17 May 2021 01:20:17 -0700 Subject: performance: speed up existence checks in packages (#23661) Spack doesn't require users to manually index their repos; it reindexes the indexes automatically when things change. To determine when to do this, it has to `stat()` all package files in each repository to make sure that indexes up to date with packages. We currently index virtual providers, patches by sha256, and tags on packages. When this was originally implemented, we ran the checker all the time, at startup, but that was slow (see #7587). But we didn't go far enough -- it still consults the checker and does all the stat operations just to see if a package exists (`Repo.exists()`). That might've been a wash in 2018, but as the number of packages has grown, it's gotten slower -- checking 5k packages is expensive and users see this for small operations. It's a win now to make `Repo.exists()` check files directly. **Fix:** This PR does a number of things to speed up `spack load`, `spack info`, and other commands: - [x] Make `Repo.exists()` check files directly again with `os.path.exists()` (this is the big one) - [x] Refactor `Spec.satisfies()` so that a checking for virtual packages only happens if needed (avoids some calls to exists()) - [x] Avoid calling `Repo.exists(spec)` in `Repo.get()`. 
`Repo.get()` will ultimately try to load a `package.py` file anyway; we can let the failure to load it indicate that the package doesn't exist, and avoid another call to exists(). - [x] Fix up some comments in spec parsing - [x] Call `UnknownPackageError` more consistently in `repo.py` --- lib/spack/spack/repo.py | 21 +++++++++++++++++---- lib/spack/spack/spec.py | 37 ++++++++++++++++++------------------- lib/spack/spack/test/cmd/pkg.py | 3 ++- 3 files changed, 37 insertions(+), 24 deletions(-) diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py index 52b44e0120..1534054075 100644 --- a/lib/spack/spack/repo.py +++ b/lib/spack/spack/repo.py @@ -914,8 +914,12 @@ class Repo(object): @autospec def get(self, spec): """Returns the package associated with the supplied spec.""" - if not self.exists(spec.name): - raise UnknownPackageError(spec.name) + # NOTE: we only check whether the package is None here, not whether it + # actually exists, because we have to load it anyway, and that ends up + # checking for existence. We avoid constructing FastPackageChecker, + # which will stat all packages. + if spec.name is None: + raise UnknownPackageError(None, self) if spec.namespace and spec.namespace != self.namespace: raise UnknownPackageError(spec.name, self.namespace) @@ -1060,7 +1064,16 @@ class Repo(object): def exists(self, pkg_name): """Whether a package with the supplied name exists.""" - return pkg_name in self._pkg_checker + if pkg_name is None: + return False + + # if the FastPackageChecker is already constructed, use it + if self._fast_package_checker: + return pkg_name in self._pkg_checker + + # if not, check for the package.py file + path = self.filename_for_package_name(pkg_name) + return os.path.exists(path) def last_mtime(self): """Time a package file in this repo was last updated.""" @@ -1327,7 +1340,7 @@ class UnknownPackageError(UnknownEntityError): long_msg = None if name: if repo: - msg = "Package '{0}' not found in repository '{1}'" + msg = "Package '{0}' not found in repository '{1.root}'" msg = msg.format(name, repo) else: msg = "Package '{0}' not found.".format(name) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 609f05ca03..edb2c94300 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -3148,25 +3148,23 @@ class Spec(object): if other.concrete: return self.concrete and self.dag_hash() == other.dag_hash() - # A concrete provider can satisfy a virtual dependency. - if not self.virtual and other.virtual: - try: - pkg = spack.repo.get(self.fullname) - except spack.repo.UnknownEntityError: - # If we can't get package info on this spec, don't treat - # it as a provider of this vdep. - return False - - if pkg.provides(other.name): - for provided, when_specs in pkg.provided.items(): - if any(self.satisfies(when_spec, deps=False, strict=strict) - for when_spec in when_specs): - if provided.satisfies(other): - return True - return False - - # Otherwise, first thing we care about is whether the name matches + # If the names are different, we need to consider virtuals if self.name != other.name and self.name and other.name: + # A concrete provider can satisfy a virtual dependency. + if not self.virtual and other.virtual: + try: + pkg = spack.repo.get(self.fullname) + except spack.repo.UnknownEntityError: + # If we can't get package info on this spec, don't treat + # it as a provider of this vdep. 
+ return False + + if pkg.provides(other.name): + for provided, when_specs in pkg.provided.items(): + if any(self.satisfies(when, deps=False, strict=strict) + for when in when_specs): + if provided.satisfies(other): + return True return False # namespaces either match, or other doesn't require one. @@ -4513,7 +4511,8 @@ class SpecParser(spack.parse.Parser): break elif self.accept(HASH): - # Get spec by hash and confirm it matches what we already have + # Get spec by hash and confirm it matches any constraints we + # already read in hash_spec = self.spec_by_hash() if hash_spec.satisfies(spec): spec._dup(hash_spec) diff --git a/lib/spack/spack/test/cmd/pkg.py b/lib/spack/spack/test/cmd/pkg.py index 52127e8605..5319e4e288 100644 --- a/lib/spack/spack/test/cmd/pkg.py +++ b/lib/spack/spack/test/cmd/pkg.py @@ -129,7 +129,8 @@ def test_pkg_add(mock_pkg_git_repo): finally: shutil.rmtree('pkg-e') # Removing a package mid-run disrupts Spack's caching - spack.repo.path.repos[0]._fast_package_checker.invalidate() + if spack.repo.path.repos[0]._fast_package_checker: + spack.repo.path.repos[0]._fast_package_checker.invalidate() with pytest.raises(spack.main.SpackCommandError): pkg('add', 'does-not-exist') -- cgit v1.2.3-60-g2f50 From 80f0c78f00732024f86b58485b872bc43bc428be Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 21 May 2021 21:29:30 +0200 Subject: Style fixes for v0.16.2 release --- lib/spack/spack/bootstrap.py | 4 +++- lib/spack/spack/cmd/location.py | 4 ++-- lib/spack/spack/solver/asp.py | 4 +++- lib/spack/spack/store.py | 3 ++- lib/spack/spack/test/cmd/undevelop.py | 4 +++- lib/spack/spack/util/environment.py | 3 ++- lib/spack/spack/util/executable.py | 10 ++++++---- share/spack/spack-completion.bash | 2 +- 8 files changed, 22 insertions(+), 12 deletions(-) diff --git a/lib/spack/spack/bootstrap.py b/lib/spack/spack/bootstrap.py index ce9b720163..f8d0703174 100644 --- a/lib/spack/spack/bootstrap.py +++ b/lib/spack/spack/bootstrap.py @@ -198,7 +198,9 @@ def get_executable(exe, spec=None, install=False): def _bootstrap_config_scopes(): tty.debug('[BOOTSTRAP CONFIG SCOPE] name=_builtin') config_scopes = [ - spack.config.InternalConfigScope('_builtin', spack.config.config_defaults) + spack.config.InternalConfigScope( + '_builtin', spack.config.config_defaults + ) ] for name, path in spack.config.configuration_paths: platform = spack.architecture.platform().name diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py index 3b72de978b..650e8a477a 100644 --- a/lib/spack/spack/cmd/location.py +++ b/lib/spack/spack/cmd/location.py @@ -119,8 +119,8 @@ def location(parser, args): if args.build_dir: # Out of source builds have build_directory defined if hasattr(pkg, 'build_directory'): - # build_directory can be either absolute or relative to the stage path - # in either case os.path.join makes it absolute + # build_directory can be either absolute or relative to the + # stage path in either case os.path.join makes it absolute print(os.path.normpath(os.path.join( pkg.stage.path, pkg.build_directory diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 30de581cbd..dbfca6506d 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -492,7 +492,9 @@ class SpackSolverSetup(object): def conflict_rules(self, pkg): for trigger, constraints in pkg.conflicts.items(): - trigger_id = self.condition(spack.spec.Spec(trigger), name=pkg.name) + trigger_id = self.condition( + spack.spec.Spec(trigger), name=pkg.name + ) 
self.gen.fact(fn.conflict_trigger(trigger_id)) for constraint, _ in constraints: diff --git a/lib/spack/spack/store.py b/lib/spack/spack/store.py index 6dbd2befb9..157ba00be6 100644 --- a/lib/spack/spack/store.py +++ b/lib/spack/spack/store.py @@ -258,7 +258,8 @@ def use_store(store_or_path): """Use the store passed as argument within the context manager. Args: - store_or_path: either a Store object ot a path to where the store resides + store_or_path: either a Store object ot a path to where the + store resides Returns: Store object associated with the context manager's store diff --git a/lib/spack/spack/test/cmd/undevelop.py b/lib/spack/spack/test/cmd/undevelop.py index 493bb99228..2e897e77f3 100644 --- a/lib/spack/spack/test/cmd/undevelop.py +++ b/lib/spack/spack/test/cmd/undevelop.py @@ -39,7 +39,9 @@ env: assert not after.satisfies('dev_path=*') -def test_undevelop_nonexistent(tmpdir, config, mock_packages, mutable_mock_env_path): +def test_undevelop_nonexistent( + tmpdir, config, mock_packages, mutable_mock_env_path +): # setup environment envdir = tmpdir.mkdir('env') with envdir.as_cwd(): diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py index c4e7ea3260..6d299fa7ef 100644 --- a/lib/spack/spack/util/environment.py +++ b/lib/spack/spack/util/environment.py @@ -943,7 +943,8 @@ def environment_after_sourcing_files(*files, **kwargs): concatenate_on_success, dump_environment, ]) output = shell( - source_file_arguments, output=str, env=environment, ignore_quotes=True + source_file_arguments, output=str, env=environment, + ignore_quotes=True ) environment = json.loads(output) diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py index c01ae1b61b..8baea6d238 100644 --- a/lib/spack/spack/util/executable.py +++ b/lib/spack/spack/util/executable.py @@ -92,8 +92,8 @@ class Executable(object): ignore_errors (int or list): A list of error codes to ignore. If these codes are returned, this process will not raise an exception even if ``fail_on_error`` is set to ``True`` - ignore_quotes (bool): If False, warn users that quotes are not needed - as Spack does not use a shell. Defaults to False. + ignore_quotes (bool): If False, warn users that quotes are not + needed as Spack does not use a shell. Defaults to False. input: Where to read stdin from output: Where to send stdout error: Where to send stderr @@ -168,12 +168,14 @@ class Executable(object): istream, close_istream = streamify(input, 'r') if not ignore_quotes: - quoted_args = [arg for arg in args if re.search(r'^"|^\'|"$|\'$', arg)] + quoted_args = [ + arg for arg in args if re.search(r'^"|^\'|"$|\'$', arg) + ] if quoted_args: tty.warn( "Quotes in command arguments can confuse scripts like" " configure.", - "The following arguments may cause problems when executed:", + "These arguments may cause problems when executed:", str("\n".join([" " + arg for arg in quoted_args])), "Quotes aren't needed because spack doesn't use a shell. " "Consider removing them.", diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 64dfa37d6b..5cf32c0cab 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -1,4 +1,4 @@ -# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) -- cgit v1.2.3-60-g2f50 From f1fe03cd5988acdf48dafd23f2cb1431a51e7b96 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 22 May 2021 11:30:57 -0700 Subject: Update CHANGELOG and release version for v0.16.2 --- CHANGELOG.md | 27 +++++++++++++++++++++++++++ lib/spack/spack/__init__.py | 2 +- 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index af98053f6d..d67ac073d6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,30 @@ +# v0.16.2 (2021-05-22) + +* Major performance improvement for `spack load` and other commands. (#23661) +* `spack fetch` is now environment-aware. (#19166) +* Numerous fixes for the new, `clingo`-based concretizer. (#23016, #23307, + #23090, #22896, #22534, #20644, #20537, #21148) +* Supoprt for automatically bootstrapping `clingo` from source. (#20652, #20657 + #21364, #21446, #21913, #22354, #22444, #22460, #22489, #22610, #22631) +* Python 3.10 support: `collections.abc` (#20441) +* Fix import issues by using `__import__` instead of Spack package importe. + (#23288, #23290) +* Bugfixes and `--source-dir` argument for `spack location`. (#22755, #22348, + #22321) +* Better support for externals in shared prefixes. (#22653) +* `spack build-env` now prefers specs defined in the active environment. + (#21642) +* Remove erroneous warnings about quotes in `from_sourcing_files`. (#22767) +* Fix clearing cache of `InternalConfigScope`. (#22609) +* Bugfix for active when pkg is already active error. (#22587) +* Make `SingleFileScope` able to repopulate the cache after clearing it. + (#22559) +* Channelflow: Fix the package. (#22483) +* More descriptive error message for bugs in `package.py` (#21811) +* Use package-supplied `autogen.sh`. (#20319) +* Respect `-k/verify-ssl-false` in `_existing_url` method. (#21864) + + # v0.16.1 (2021-02-22) This minor release includes a new feature and associated fixes: diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index 21e0d3f863..cefee1203f 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -5,7 +5,7 @@ #: major, minor, patch version for Spack, in a tuple -spack_version_info = (0, 16, 1) +spack_version_info = (0, 16, 2) #: String containing Spack version joined with .'s spack_version = '.'.join(str(v) for v in spack_version_info) -- cgit v1.2.3-60-g2f50
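
Two of the patches above (the compiler-module loading change at the start of this section and the hooks refactor in #23288) replace spack.util.imp.load_source with Python's built-in __import__, whose return value depends on the 'fromlist' argument: for a dotted name, an empty fromlist makes it return the top-level package, while a non-empty fromlist makes it return the named submodule itself, which is why both call sites pass fromlist=[None]. A minimal sketch of that behavior, for illustration only, using a standard-library module as a stand-in rather than actual Spack code:

    # Illustration only -- not part of any patch above. 'logging.handlers' is
    # just a convenient stand-in for a dotted module name such as
    # 'spack.hooks.write_install_manifest'.

    # With no fromlist, __import__ imports the submodule but returns the
    # top-level package ('logging').
    top = __import__('logging.handlers')
    assert top.__name__ == 'logging'

    # With a non-empty fromlist (the patches pass [None]), the submodule
    # object itself is returned, which is what the hook and compiler loaders
    # need; see https://stackoverflow.com/a/2725668 cited in the hooks diff.
    sub = __import__('logging.handlers', fromlist=[None])
    assert sub.__name__ == 'logging.handlers'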