summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorTamara Dahlgren <35777542+tldahlgren@users.noreply.github.com>2020-11-17 02:41:07 -0800
committerGitHub <noreply@github.com>2020-11-17 02:41:07 -0800
commit6fa6af1070908895b77d0ee35650b499971e2719 (patch)
treecf7abb82084026a8b57105fbe7b18cece9a7776a
parent423e80af23cc9ff54b14fc268434c38fc9f7f050 (diff)
downloadspack-6fa6af1070908895b77d0ee35650b499971e2719.tar.gz
spack-6fa6af1070908895b77d0ee35650b499971e2719.tar.bz2
spack-6fa6af1070908895b77d0ee35650b499971e2719.tar.xz
spack-6fa6af1070908895b77d0ee35650b499971e2719.zip
Support parallel environment builds (#18131)
As of #13100, Spack installs the dependencies of a _single_ spec in parallel. Environments, when installed, can only get parallelism from each individual spec, as they're installed in order. This PR makes entire environments build in parallel by extending Spack's package installer to accept multiple root specs. The install command and Environment class have been updated to use the new parallel install method. The specs and kwargs for each *uninstalled* package (when not force-replacing installations) of an environment are collected, passed to the `PackageInstaller`, and processed using a single build queue. This introduces a `BuildRequest` class to track install arguments, and it significantly cleans up the code used to track package ids during installation. Package ids in the build queue are now just DAG hashes as you would expect. Other tasks: - [x] Finish updating the unit tests based on `PackageInstaller`'s use of `BuildRequest` and the associated changes - [x] Change `environment.py`'s `install_all` to use the `PackageInstaller` directly - [x] Change the `install` command to leverage the new installation process for multiple specs - [x] Change install output messages for external packages, e.g.: `[+] /usr` -> `[+] /usr (external bzip2-1.0.8-<dag-hash>` - [x] Fix incomplete environment install's view setup/update and not confirming all packages are installed (?) - [x] Ensure externally installed package dependencies are properly accounted for in remaining build tasks - [x] Add tests for coverage (if insufficient and can identify the appropriate, uncovered non-comment lines) - [x] Add documentation - [x] Resolve multi-compiler environment install issues - [x] Fix issue with environment installation reporting (restore CDash/JUnit reports)
-rw-r--r--lib/spack/docs/basic_usage.rst32
-rw-r--r--lib/spack/docs/environments.rst47
-rw-r--r--lib/spack/docs/packaging_guide.rst49
-rwxr-xr-xlib/spack/env/cc2
-rw-r--r--lib/spack/spack/cmd/install.py61
-rw-r--r--lib/spack/spack/environment.py60
-rw-r--r--lib/spack/spack/installer.py802
-rw-r--r--lib/spack/spack/package.py41
-rw-r--r--lib/spack/spack/report.py4
-rw-r--r--lib/spack/spack/test/buildrequest.py55
-rw-r--r--lib/spack/spack/test/buildtask.py20
-rw-r--r--lib/spack/spack/test/cmd/dev_build.py16
-rw-r--r--lib/spack/spack/test/cmd/env.py55
-rw-r--r--lib/spack/spack/test/cmd/install.py33
-rw-r--r--lib/spack/spack/test/install.py2
-rw-r--r--lib/spack/spack/test/installer.py477
-rw-r--r--var/spack/repos/builtin.mock/packages/depb/package.py22
17 files changed, 1222 insertions, 556 deletions
diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst
index 221516238e..f846cdabe4 100644
--- a/lib/spack/docs/basic_usage.rst
+++ b/lib/spack/docs/basic_usage.rst
@@ -132,32 +132,28 @@ If ``mpileaks`` depends on other packages, Spack will install the
dependencies first. It then fetches the ``mpileaks`` tarball, expands
it, verifies that it was downloaded without errors, builds it, and
installs it in its own directory under ``$SPACK_ROOT/opt``. You'll see
-a number of messages from spack, a lot of build output, and a message
-that the packages is installed:
+a number of messages from Spack, a lot of build output, and a message
+that the package is installed. Add one or more debug options (``-d``)
+to get increasingly detailed output.
.. code-block:: console
$ spack install mpileaks
- ==> Installing mpileaks
- ==> mpich is already installed in ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpich@3.0.4.
- ==> callpath is already installed in ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/callpath@1.0.2-5dce4318.
- ==> adept-utils is already installed in ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/adept-utils@1.0-5adef8da.
- ==> Trying to fetch from https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
- ######################################################################## 100.0%
- ==> Staging archive: ~/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=linux-debian7-x86_64-59f6ad23/mpileaks-1.0.tar.gz
- ==> Created stage in ~/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=linux-debian7-x86_64-59f6ad23.
- ==> No patches needed for mpileaks.
- ==> Building mpileaks.
-
- ... build output ...
-
- ==> Successfully installed mpileaks.
- Fetch: 2.16s. Build: 9.82s. Total: 11.98s.
- [+] ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpileaks@1.0-59f6ad23
+ ... dependency build output ...
+ ==> Installing mpileaks-1.0-ph7pbnhl334wuhogmugriohcwempqry2
+ ==> No binary for mpileaks-1.0-ph7pbnhl334wuhogmugriohcwempqry2 found: installing from source
+ ==> mpileaks: Executing phase: 'autoreconf'
+ ==> mpileaks: Executing phase: 'configure'
+ ==> mpileaks: Executing phase: 'build'
+ ==> mpileaks: Executing phase: 'install'
+ [+] ~/spack/opt/linux-rhel7-broadwell/gcc-8.1.0/mpileaks-1.0-ph7pbnhl334wuhogmugriohcwempqry2
The last line, with the ``[+]``, indicates where the package is
installed.
+Add the debug option -- ``spack install -d mpileaks`` -- to get additional
+output.
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^
Building a specific version
^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/lib/spack/docs/environments.rst b/lib/spack/docs/environments.rst
index 55ac458a3c..65eec36581 100644
--- a/lib/spack/docs/environments.rst
+++ b/lib/spack/docs/environments.rst
@@ -191,44 +191,24 @@ Environment has been activated. Similarly, the ``install`` and
==> 0 installed packages
$ spack install zlib@1.2.11
- ==> Installing zlib
- ==> Searching for binary cache of zlib
- ==> Warning: No Spack mirrors are currently configured
- ==> No binary for zlib found: installing from source
- ==> Fetching http://zlib.net/fossils/zlib-1.2.11.tar.gz
- ######################################################################## 100.0%
- ==> Staging archive: /spack/var/spack/stage/zlib-1.2.11-3r4cfkmx3wwfqeof4bc244yduu2mz4ur/zlib-1.2.11.tar.gz
- ==> Created stage in /spack/var/spack/stage/zlib-1.2.11-3r4cfkmx3wwfqeof4bc244yduu2mz4ur
- ==> No patches needed for zlib
- ==> Building zlib [Package]
- ==> Executing phase: 'install'
- ==> Successfully installed zlib
- Fetch: 0.36s. Build: 11.58s. Total: 11.93s.
- [+] /spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/zlib-1.2.11-3r4cfkmx3wwfqeof4bc244yduu2mz4ur
+ ==> Installing zlib-1.2.11-q6cqrdto4iktfg6qyqcc5u4vmfmwb7iv
+ ==> No binary for zlib-1.2.11-q6cqrdto4iktfg6qyqcc5u4vmfmwb7iv found: installing from source
+ ==> zlib: Executing phase: 'install'
+ [+] ~/spack/opt/spack/linux-rhel7-broadwell/gcc-8.1.0/zlib-1.2.11-q6cqrdto4iktfg6qyqcc5u4vmfmwb7iv
$ spack env activate myenv
$ spack find
==> In environment myenv
==> No root specs
-
==> 0 installed packages
$ spack install zlib@1.2.8
- ==> Installing zlib
- ==> Searching for binary cache of zlib
- ==> Warning: No Spack mirrors are currently configured
- ==> No binary for zlib found: installing from source
- ==> Fetching http://zlib.net/fossils/zlib-1.2.8.tar.gz
- ######################################################################## 100.0%
- ==> Staging archive: /spack/var/spack/stage/zlib-1.2.8-y2t6kq3s23l52yzhcyhbpovswajzi7f7/zlib-1.2.8.tar.gz
- ==> Created stage in /spack/var/spack/stage/zlib-1.2.8-y2t6kq3s23l52yzhcyhbpovswajzi7f7
- ==> No patches needed for zlib
- ==> Building zlib [Package]
- ==> Executing phase: 'install'
- ==> Successfully installed zlib
- Fetch: 0.26s. Build: 2.08s. Total: 2.35s.
- [+] /spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/zlib-1.2.8-y2t6kq3s23l52yzhcyhbpovswajzi7f7
+ ==> Installing zlib-1.2.8-yfc7epf57nsfn2gn4notccaiyxha6z7x
+ ==> No binary for zlib-1.2.8-yfc7epf57nsfn2gn4notccaiyxha6z7x found: installing from source
+ ==> zlib: Executing phase: 'install'
+ [+] ~/spack/opt/spack/linux-rhel7-broadwell/gcc-8.1.0/zlib-1.2.8-yfc7epf57nsfn2gn4notccaiyxha6z7x
+ ==> Updating view at ~/spack/var/spack/environments/myenv/.spack-env/view
$ spack find
==> In environment myenv
@@ -236,15 +216,17 @@ Environment has been activated. Similarly, the ``install`` and
zlib@1.2.8
==> 1 installed package
- -- linux-rhel7-x86_64 / gcc@4.9.3 -------------------------------
+ -- linux-rhel7-broadwell / gcc@8.1.0 ----------------------------
zlib@1.2.8
$ despacktivate
+
$ spack find
==> 2 installed packages
- -- linux-rhel7-x86_64 / gcc@4.9.3 -------------------------------
+ -- linux-rhel7-broadwell / gcc@8.1.0 ----------------------------
zlib@1.2.8 zlib@1.2.11
+
Note that when we installed the abstract spec ``zlib@1.2.8``, it was
presented as a root of the Environment. All explicitly installed
packages will be listed as roots of the Environment.
@@ -349,6 +331,9 @@ installed specs using the ``-c`` (``--concretized``) flag.
==> 0 installed packages
+
+.. _installing-environment:
+
^^^^^^^^^^^^^^^^^^^^^^^^^
Installing an Environment
^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 66caddeb0b..836fc12b83 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -1778,8 +1778,18 @@ RPATHs in Spack are handled in one of three ways:
Parallel builds
---------------
+Spack supports parallel builds on an individual package and at the
+installation level. Package-level parallelism is established by the
+``--jobs`` option and its configuration and package recipe equivalents.
+Installation-level parallelism is driven by the DAG(s) of the requested
+package or packages.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Package-level build parallelism
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
By default, Spack will invoke ``make()``, or any other similar tool,
-with a ``-j <njobs>`` argument, so that builds run in parallel.
+with a ``-j <njobs>`` argument, so those builds run in parallel.
The parallelism is determined by the value of the ``build_jobs`` entry
in ``config.yaml`` (see :ref:`here <build-jobs>` for more details on
how this value is computed).
@@ -1827,6 +1837,43 @@ you set ``parallel`` to ``False`` at the package level, then each call
to ``make()`` will be sequential by default, but packagers can call
``make(parallel=True)`` to override it.
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Install-level build parallelism
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Spack supports the concurrent installation of packages within a Spack
+instance across multiple processes using file system locks. This
+parallelism is separate from the package-level parallelism achieved through
+build systems' use of the ``-j <njobs>`` option. With install-level parallelism,
+processes coordinate the installation of the dependencies of specs
+provided on the command line and as part of an environment build with
+only **one process** being allowed to install a given package at a time.
+Refer to :ref:`Dependencies` for more information on dependencies and
+:ref:`installing-environment` for how to install an environment.
+
+Concurrent processes may be any combination of interactive sessions and
+batch jobs. Which means a ``spack install`` can be running in a terminal
+window while a batch job is running ``spack install`` on the same or
+overlapping dependencies without any process trying to re-do the work of
+another.
+
+For example, if you are using SLURM, you could launch an installation
+of ``mpich`` using the following command:
+
+.. code-block:: console
+
+ $ srun -N 2 -n 8 spack install -j 4 mpich@3.3.2
+
+This will create eight concurrent four-job installations on two different
+nodes.
+
+.. note::
+
+ The effective parallelism will be based on the maximum number of
+   packages that can be installed at the same time, which will be limited
+ by the number of packages with no (remaining) uninstalled dependencies.
+
+
.. _dependencies:
------------
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index f23b22775b..5efe015c9e 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -22,7 +22,7 @@
# This is an array of environment variables that need to be set before
# the script runs. They are set by routines in spack.build_environment
-# as part of spack.package.Package.do_install().
+# as part of the package installation process.
parameters=(
SPACK_ENV_PATH
SPACK_DEBUG_LOG_DIR
diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py
index c2875e495b..8803a9f312 100644
--- a/lib/spack/spack/cmd/install.py
+++ b/lib/spack/spack/cmd/install.py
@@ -20,6 +20,7 @@ import spack.fetch_strategy
import spack.paths
import spack.report
from spack.error import SpackError
+from spack.installer import PackageInstaller
description = "build and install packages"
@@ -29,7 +30,7 @@ level = "short"
def update_kwargs_from_args(args, kwargs):
"""Parse cli arguments and construct a dictionary
- that will be passed to Package.do_install API"""
+ that will be passed to the package installer."""
kwargs.update({
'fail_fast': args.fail_fast,
@@ -238,23 +239,29 @@ def default_log_file(spec):
return fs.os.path.join(dirname, basename)
-def install_spec(cli_args, kwargs, abstract_spec, spec):
- """Do the actual installation."""
+def install_specs(cli_args, kwargs, specs):
+ """Do the actual installation.
+
+ Args:
+ cli_args (Namespace): argparse namespace with command arguments
+ kwargs (dict): keyword arguments
+ specs (list of tuples): list of (abstract, concrete) spec tuples
+ """
+
+ # handle active environment, if any
+ env = ev.get_env(cli_args, 'install')
try:
- # handle active environment, if any
- env = ev.get_env(cli_args, 'install')
if env:
- with env.write_transaction():
- concrete = env.concretize_and_add(
- abstract_spec, spec)
- env.write(regenerate_views=False)
- env._install(concrete, **kwargs)
- with env.write_transaction():
- env.regenerate_views()
+ for abstract, concrete in specs:
+ with env.write_transaction():
+ concrete = env.concretize_and_add(abstract, concrete)
+ env.write(regenerate_views=False)
+ env.install_all(cli_args, **kwargs)
else:
- spec.package.do_install(**kwargs)
-
+ installs = [(concrete.package, kwargs) for _, concrete in specs]
+ builder = PackageInstaller(installs)
+ builder.install()
except spack.build_environment.InstallError as e:
if cli_args.show_log_on_error:
e.print_context()
@@ -280,6 +287,10 @@ environment variables:
parser.print_help()
return
+ reporter = spack.report.collect_info(args.log_format, args)
+ if args.log_file:
+ reporter.filename = args.log_file
+
if not args.spec and not args.specfiles:
# if there are no args but an active environment
# then install the packages from it.
@@ -294,8 +305,17 @@ environment variables:
# once, as it can be slow.
env.write(regenerate_views=False)
- tty.msg("Installing environment %s" % env.name)
- env.install_all(args)
+ specs = env.all_specs()
+ if not args.log_file and not reporter.filename:
+ reporter.filename = default_log_file(specs[0])
+ reporter.specs = specs
+
+ tty.msg("Installing environment {0}".format(env.name))
+ with reporter:
+ env.install_all(args, **kwargs)
+
+ tty.debug("Regenerating environment views for {0}"
+ .format(env.name))
with env.write_transaction():
# It is not strictly required to synchronize view regeneration
# but doing so can prevent redundant work in the filesystem.
@@ -319,17 +339,13 @@ environment variables:
spack.config.set('config:checksum', False, scope='command_line')
# Parse cli arguments and construct a dictionary
- # that will be passed to Package.do_install API
+ # that will be passed to the package installer
update_kwargs_from_args(args, kwargs)
if args.run_tests:
tty.warn("Deprecated option: --run-tests: use --test=all instead")
# 1. Abstract specs from cli
- reporter = spack.report.collect_info(args.log_format, args)
- if args.log_file:
- reporter.filename = args.log_file
-
abstract_specs = spack.cmd.parse_specs(args.spec)
tests = False
if args.test == 'all' or args.run_tests:
@@ -401,5 +417,4 @@ environment variables:
# overwrite all concrete explicit specs from this build
kwargs['overwrite'] = [spec.dag_hash() for spec in specs]
- for abstract, concrete in zip(abstract_specs, specs):
- install_spec(args, kwargs, abstract, concrete)
+ install_specs(args, kwargs, zip(abstract_specs, specs))
diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py
index 42b4cb819a..0b63bad6a7 100644
--- a/lib/spack/spack/environment.py
+++ b/lib/spack/spack/environment.py
@@ -39,6 +39,7 @@ from spack.spec_list import SpecList, InvalidSpecConstraintError
from spack.variant import UnknownVariantError
import spack.util.lock as lk
from spack.util.path import substitute_path_variables
+from spack.installer import PackageInstaller
#: environment variable used to indicate the active environment
spack_env_var = 'SPACK_ENV'
@@ -1371,24 +1372,7 @@ class Environment(object):
return ret
- def install(self, user_spec, concrete_spec=None, **install_args):
- """Install a single spec into an environment.
-
- This will automatically concretize the single spec, but it won't
- affect other as-yet unconcretized specs.
- """
- concrete = self.concretize_and_add(user_spec, concrete_spec)
-
- self._install(concrete, **install_args)
-
- def _install(self, spec, **install_args):
- # "spec" must be concrete
- package = spec.package
-
- install_args['overwrite'] = install_args.get(
- 'overwrite', []) + self._get_overwrite_specs()
- package.do_install(**install_args)
-
+ def _install_log_links(self, spec):
if not spec.external:
# Make sure log directory exists
log_path = self.log_path
@@ -1402,19 +1386,22 @@ class Environment(object):
os.remove(build_log_link)
os.symlink(spec.package.build_log_path, build_log_link)
- def install_all(self, args=None):
+ def install_all(self, args=None, **install_args):
"""Install all concretized specs in an environment.
Note: this does not regenerate the views for the environment;
that needs to be done separately with a call to write().
+ Args:
+ args (Namespace): argparse namespace with command arguments
+ install_args (dict): keyword install arguments
"""
-
# If "spack install" is invoked repeatedly for a large environment
# where all specs are already installed, the operation can take
# a large amount of time due to repeatedly acquiring and releasing
# locks, this does an initial check across all specs within a single
# DB read transaction to reduce time spent in this case.
+ tty.debug('Assessing installation status of environment packages')
specs_to_install = []
with spack.store.db.read_transaction():
for concretized_hash in self.concretized_order:
@@ -1426,14 +1413,43 @@ class Environment(object):
# If it's a dev build it could need to be reinstalled
specs_to_install.append(spec)
+ if not specs_to_install:
+ tty.msg('All of the packages are already installed')
+ return
+
+ tty.debug('Processing {0} uninstalled specs'
+ .format(len(specs_to_install)))
+
+ install_args['overwrite'] = install_args.get(
+ 'overwrite', []) + self._get_overwrite_specs()
+
+ installs = []
for spec in specs_to_install:
# Parse cli arguments and construct a dictionary
- # that will be passed to Package.do_install API
+ # that will be passed to the package installer
kwargs = dict()
+ if install_args:
+ kwargs.update(install_args)
if args:
spack.cmd.install.update_kwargs_from_args(args, kwargs)
- self._install(spec, **kwargs)
+ installs.append((spec.package, kwargs))
+
+ try:
+ builder = PackageInstaller(installs)
+ builder.install()
+ finally:
+ # Ensure links are set appropriately
+ for spec in specs_to_install:
+ if spec.package.installed:
+ try:
+ self._install_log_links(spec)
+ except OSError as e:
+ tty.warn('Could not install log links for {0}: {1}'
+ .format(spec.name, str(e)))
+
+ with self.write_transaction():
+ self.regenerate_views()
def all_specs(self):
"""Return all specs, even those a user spec would shadow."""
diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py
index 51919f3729..cf7b5bfb99 100644
--- a/lib/spack/spack/installer.py
+++ b/lib/spack/spack/installer.py
@@ -27,6 +27,7 @@ This module supports the coordination of local and distributed concurrent
installations of packages in a Spack instance.
"""
+import copy
import glob
import heapq
import itertools
@@ -36,6 +37,8 @@ import six
import sys
import time
+from collections import defaultdict
+
import llnl.util.filesystem as fs
import llnl.util.lock as lk
import llnl.util.tty as tty
@@ -80,6 +83,31 @@ STATUS_DEQUEUED = 'dequeued'
STATUS_REMOVED = 'removed'
+def _check_last_phase(pkg):
+ """
+ Ensures the specified package has a valid last phase before proceeding
+ with its installation.
+
+ The last phase is also set to None if it is the last phase of the
+ package already.
+
+ Args:
+ pkg (PackageBase): the package being installed
+
+ Raises:
+ ``BadInstallPhase`` if stop_before or last phase is invalid
+ """
+ if pkg.stop_before_phase and pkg.stop_before_phase not in pkg.phases:
+ raise BadInstallPhase(pkg.name, pkg.stop_before_phase)
+
+ if pkg.last_phase and pkg.last_phase not in pkg.phases:
+ raise BadInstallPhase(pkg.name, pkg.last_phase)
+
+ # If we got a last_phase, make sure it's not already last
+ if pkg.last_phase and pkg.last_phase == pkg.phases[-1]:
+ pkg.last_phase = None
+
+
def _handle_external_and_upstream(pkg, explicit):
"""
Determine if the package is external or upstream and register it in the
@@ -96,6 +124,8 @@ def _handle_external_and_upstream(pkg, explicit):
# consists in module file generation and registration in the DB.
if pkg.spec.external:
_process_external_package(pkg, explicit)
+ _print_installed_pkg('{0} (external {1})'
+ .format(pkg.prefix, package_id(pkg)))
return True
if pkg.installed_upstream:
@@ -279,8 +309,8 @@ def _process_external_package(pkg, explicit):
tty.debug('{0} is actually installed in {1}'
.format(pre, spec.external_path))
else:
- tty.msg('{0} externally installed in {1}'
- .format(pre, spec.external_path))
+ tty.debug('{0} externally installed in {1}'
+ .format(pre, spec.external_path))
try:
# Check if the package was already registered in the DB.
@@ -452,6 +482,19 @@ def dump_packages(spec, path):
fs.install_tree(source_pkg_dir, dest_pkg_dir)
+def get_dependent_ids(spec):
+ """
+ Return a list of package ids for the spec's dependents
+
+ Args:
+ spec (Spec): Concretized spec
+
+ Returns:
+ (list of str): list of package ids
+ """
+ return [package_id(d.package) for d in spec.dependents()]
+
+
def install_msg(name, pid):
"""
Colorize the name/id of the package being installed
@@ -472,7 +515,7 @@ def log(pkg):
Copy provenance into the install directory on success
Args:
- pkg (Package): the package that was installed and built
+ pkg (Package): the package that was built and installed
"""
packages_dir = spack.store.layout.build_packages_path(pkg.spec)
@@ -544,61 +587,17 @@ def package_id(pkg):
The identifier is used to track build tasks, locks, install, and
failure statuses.
+ The identifier needs to distinguish between combinations of compilers
+ and packages for combinatorial environments.
+
Args:
pkg (PackageBase): the package from which the identifier is derived
"""
if not pkg.spec.concrete:
raise ValueError("Cannot provide a unique, readable id when "
"the spec is not concretized.")
- # TODO: Restore use of the dag hash once resolve issues with different
- # TODO: hashes being associated with dependents of different packages
- # TODO: within the same install, such as the hash for callpath being
- # TODO: different for mpich and dyninst in the
- # TODO: test_force_uninstall_and_reinstall_by_hash` test.
-
- # TODO: Is the extra "readability" of the version worth keeping?
- # return "{0}-{1}-{2}".format(pkg.name, pkg.version, pkg.spec.dag_hash())
-
- # TODO: Including the version causes some installs to fail. Specifically
- # TODO: failures occur when the version of a dependent of one of the
- # TODO: packages does not match the version that is installed.
- # return "{0}-{1}".format(pkg.name, pkg.version)
-
- return pkg.name
-
-
-install_args_docstring = """
- cache_only (bool): Fail if binary package unavailable.
- dirty (bool): Don't clean the build environment before installing.
- explicit (bool): True if package was explicitly installed, False
- if package was implicitly installed (as a dependency).
- fail_fast (bool): Fail if any dependency fails to install;
- otherwise, the default is to install as many dependencies as
- possible (i.e., best effort installation).
- fake (bool): Don't really build; install fake stub files instead.
- install_deps (bool): Install dependencies before installing this
- package
- install_source (bool): By default, source is not installed, but
- for debugging it might be useful to keep it around.
- keep_prefix (bool): Keep install prefix on failure. By default,
- destroys it.
- keep_stage (bool): By default, stage is destroyed only if there
- are no exceptions during build. Set to True to keep the stage
- even with exceptions.
- overwrite (list): list of hashes for packages to do overwrite
- installs. Default empty list.
- restage (bool): Force spack to restage the package source.
- skip_patch (bool): Skip patch stage of build if True.
- stop_before (InstallPhase): stop execution before this
- installation phase (or None)
- stop_at (InstallPhase): last installation phase to be executed
- (or None)
- tests (bool or list or set): False to run no tests, True to test
- all packages, or a list of package names to run tests for some
- use_cache (bool): Install from binary package, if available.
- verbose (bool): Display verbose build output (by default,
- suppresses it)
- """
+
+ return "{0}-{1}-{2}".format(pkg.name, pkg.version, pkg.spec.dag_hash())
class PackageInstaller(object):
@@ -611,29 +610,19 @@ class PackageInstaller(object):
instance.
'''
- def __init__(self, pkg):
- """
- Initialize and set up the build specs.
+ def __init__(self, installs=[]):
+ """ Initialize the installer.
Args:
- pkg (PackageBase): the package being installed, whose spec is
- concrete
-
+ installs (list of (pkg, install_args)): list of tuples, where each
+ tuple consists of a package (PackageBase) and its associated
+ install arguments (dict)
Return:
(PackageInstaller) instance
"""
- if not isinstance(pkg, spack.package.PackageBase):
- raise ValueError("{0} must be a package".format(str(pkg)))
-
- if not pkg.spec.concrete:
- raise ValueError("{0}: Can only install concrete packages."
- .format(pkg.spec.name))
-
- # Spec of the package to be built
- self.pkg = pkg
-
- # The identifier used for the explicit package being built
- self.pkg_id = package_id(pkg)
+ # List of build requests
+ self.build_requests = [BuildRequest(pkg, install_args)
+ for pkg, install_args in installs]
# Priority queue of build tasks
self.build_pq = []
@@ -650,16 +639,16 @@ class PackageInstaller(object):
# Cache of installed packages' unique ids
self.installed = set()
- # Cache of overwrite information
- self.overwrite = set()
- self.overwrite_time = time.time()
-
# Data store layout
self.layout = spack.store.layout
# Locks on specs being built, keyed on the package's unique id
self.locks = {}
+ # Cache fail_fast option to ensure if one build request asks to fail
+ # fast then that option applies to all build requests.
+ self.fail_fast = False
+
def __repr__(self):
"""Returns a formal representation of the package installer."""
rep = '{0}('.format(self.__class__.__name__)
@@ -669,24 +658,48 @@ class PackageInstaller(object):
def __str__(self):
"""Returns a printable version of the package installer."""
+ requests = '#requests={0}'.format(len(self.build_requests))
tasks = '#tasks={0}'.format(len(self.build_tasks))
failed = 'failed ({0}) = {1}'.format(len(self.failed), self.failed)
installed = 'installed ({0}) = {1}'.format(
len(self.installed), self.installed)
- return '{0} ({1}): {2}; {3}; {4}'.format(
- self.pkg_id, self.pid, tasks, installed, failed)
+ return '{0}: {1}; {2}; {3}; {4}'.format(
+ self.pid, requests, tasks, installed, failed)
- def _add_bootstrap_compilers(self, pkg):
+ def _add_bootstrap_compilers(self, pkg, request, all_deps):
"""
Add bootstrap compilers and dependencies to the build queue.
Args:
pkg (PackageBase): the package with possible compiler dependencies
+ request (BuildRequest): the associated install request
+ all_deps (defaultdict(set)): dictionary of all dependencies and
+ associated dependents
"""
packages = _packages_needed_to_bootstrap_compiler(pkg)
for (comp_pkg, is_compiler) in packages:
if package_id(comp_pkg) not in self.build_tasks:
- self._push_task(comp_pkg, is_compiler, 0, 0, STATUS_ADDED)
+ self._add_init_task(comp_pkg, request, is_compiler, all_deps)
+
+ def _add_init_task(self, pkg, request, is_compiler, all_deps):
+ """
+    Creates and queues the initial build task for the package.
+
+ Args:
+ pkg (Package): the package to be built and installed
+ request (BuildRequest or None): the associated install request
+ where ``None`` can be used to indicate the package was
+ explicitly requested by the user
+ is_compiler (bool): whether task is for a bootstrap compiler
+ all_deps (defaultdict(set)): dictionary of all dependencies and
+ associated dependents
+ """
+ task = BuildTask(pkg, request, is_compiler, 0, 0, STATUS_ADDED,
+ self.installed)
+ for dep_id in task.dependencies:
+ all_deps[dep_id].add(package_id(pkg))
+
+ self._push_task(task)
def _check_db(self, spec):
"""Determine if the spec is flagged as installed in the database
@@ -710,11 +723,14 @@ class PackageInstaller(object):
installed_in_db = False
return rec, installed_in_db
- def _check_deps_status(self):
- """Check the install status of the explicit spec's dependencies"""
+ def _check_deps_status(self, request):
+ """Check the install status of the requested package
+ Args:
+ request (BuildRequest): the associated install request
+ """
err = 'Cannot proceed with {0}: {1}'
- for dep in self.spec.traverse(order='post', root=False):
+ for dep in request.traverse_dependencies():
dep_pkg = dep.package
dep_id = package_id(dep_pkg)
@@ -723,7 +739,7 @@ class PackageInstaller(object):
action = "'spack install' the dependency"
msg = '{0} is marked as an install failure: {1}' \
.format(dep_id, action)
- raise InstallError(err.format(self.pkg_id, msg))
+ raise InstallError(err.format(request.pkg_id, msg))
# Attempt to get a write lock to ensure another process does not
# uninstall the dependency while the requested spec is being
@@ -731,27 +747,24 @@ class PackageInstaller(object):
ltype, lock = self._ensure_locked('write', dep_pkg)
if lock is None:
msg = '{0} is write locked by another process'.format(dep_id)
- raise InstallError(err.format(self.pkg_id, msg))
+ raise InstallError(err.format(request.pkg_id, msg))
# Flag external and upstream packages as being installed
if dep_pkg.spec.external or dep_pkg.installed_upstream:
- form = 'external' if dep_pkg.spec.external else 'upstream'
- tty.debug('Flagging {0} {1} as installed'.format(form, dep_id))
- self.installed.add(dep_id)
+ self._flag_installed(dep_pkg)
continue
# Check the database to see if the dependency has been installed
# and flag as such if appropriate
rec, installed_in_db = self._check_db(dep)
if installed_in_db and (
- dep.dag_hash() not in self.overwrite or
- rec.installation_time > self.overwrite_time):
+ dep.dag_hash() not in request.overwrite or
+ rec.installation_time > request.overwrite_time):
tty.debug('Flagging {0} as installed per the database'
.format(dep_id))
- self.installed.add(dep_id)
+ self._flag_installed(dep_pkg)
- def _prepare_for_install(self, task, keep_prefix, keep_stage,
- restage=False):
+ def _prepare_for_install(self, task):
"""
Check the database and leftover installation directories/files and
prepare for a new install attempt for an uninstalled package.
@@ -762,13 +775,12 @@ class PackageInstaller(object):
Args:
task (BuildTask): the build task whose associated package is
being checked
- keep_prefix (bool): ``True`` if the prefix is to be kept on
- failure, otherwise ``False``
- keep_stage (bool): ``True`` if the stage is to be kept even if
- there are exceptions, otherwise ``False``
- restage (bool): ``True`` if forcing Spack to restage the package
- source, otherwise ``False``
"""
+ install_args = task.request.install_args
+ keep_prefix = install_args.get('keep_prefix')
+ keep_stage = install_args.get('keep_stage')
+ restage = install_args.get('restage')
+
# Make sure the package is ready to be locally installed.
self._ensure_install_ready(task.pkg)
@@ -797,13 +809,13 @@ class PackageInstaller(object):
task.pkg.stage.destroy()
if not partial and self.layout.check_installed(task.pkg.spec) and (
- rec.spec.dag_hash() not in self.overwrite or
- rec.installation_time > self.overwrite_time
+ rec.spec.dag_hash() not in task.request.overwrite or
+ rec.installation_time > task.request.overwrite_time
):
self._update_installed(task)
# Only update the explicit entry once for the explicit package
- if task.pkg_id == self.pkg_id:
+ if task.explicit:
_update_explicit_entry_in_db(task.pkg, rec, True)
# In case the stage directory has already been created, this
@@ -812,38 +824,6 @@ class PackageInstaller(object):
if not keep_stage:
task.pkg.stage.destroy()
- def _check_last_phase(self, **kwargs):
- """
- Ensures the package being installed has a valid last phase before
- proceeding with the installation.
-
- The ``stop_before`` or ``stop_at`` arguments are removed from the
- installation arguments.
-
- The last phase is also set to None if it is the last phase of the
- package already
-
- Args:
- kwargs:
- ``stop_before``': stop before execution of this phase (or None)
- ``stop_at``': last installation phase to be executed (or None)
- """
- self.pkg.stop_before_phase = kwargs.pop('stop_before', None)
- if self.pkg.stop_before_phase is not None and \
- self.pkg.stop_before_phase not in self.pkg.phases:
- tty.die('\'{0}\' is not an allowed phase for package {1}'
- .format(self.pkg.stop_before_phase, self.pkg.name))
-
- self.pkg.last_phase = kwargs.pop('stop_at', None)
- if self.pkg.last_phase is not None and \
- self.pkg.last_phase not in self.pkg.phases:
- tty.die('\'{0}\' is not an allowed phase for package {1}'
- .format(self.pkg.last_phase, self.pkg.name))
- # If we got a last_phase, make sure it's not already last
- if self.pkg.last_phase and \
- self.pkg.last_phase == self.pkg.phases[-1]:
- self.pkg.last_phase = None
-
def _cleanup_all_tasks(self):
"""Cleanup all build tasks to include releasing their locks."""
for pkg_id in self.locks:
@@ -1000,30 +980,53 @@ class PackageInstaller(object):
self.locks[pkg_id] = (lock_type, lock)
return self.locks[pkg_id]
- def _init_queue(self, install_deps, install_package):
- """
- Initialize the build task priority queue and spec state.
+ def _add_tasks(self, request, all_deps):
+ """Add tasks to the priority queue for the given build request.
+
+ It also tracks all dependents associated with each dependency in
+ order to ensure proper tracking of uninstalled dependencies.
Args:
- install_deps (bool): ``True`` if installing package dependencies,
- otherwise ``False``
- install_package (bool): ``True`` if installing the package,
- otherwise ``False``
+ request (BuildRequest): the associated install request
+ all_deps (defaultdict(set)): dictionary of all dependencies and
+ associated dependents
"""
- tty.debug('Initializing the build queue for {0}'.format(self.pkg.name))
+ tty.debug('Initializing the build queue for {0}'
+ .format(request.pkg.name))
+
+ # Ensure not attempting to perform an installation when user didn't
+ # want to go that far for the requested package.
+ try:
+ _check_last_phase(request.pkg)
+ except BadInstallPhase as err:
+ tty.warn('Installation request refused: {0}'.format(str(err)))
+ return
+
+ # Skip out early if the spec is not being installed locally (i.e., if
+ # external or upstream).
+ #
+ # External and upstream packages need to get flagged as installed to
+ # ensure proper status tracking for environment build.
+ not_local = _handle_external_and_upstream(request.pkg, True)
+ if not_local:
+ self._flag_installed(request.pkg)
+ return
+
install_compilers = spack.config.get(
'config:install_missing_compilers', False)
+ install_deps = request.install_args.get('install_deps')
if install_deps:
- for dep in self.spec.traverse(order='post', root=False):
+ for dep in request.traverse_dependencies():
dep_pkg = dep.package
# First push any missing compilers (if requested)
if install_compilers:
- self._add_bootstrap_compilers(dep_pkg)
+ self._add_bootstrap_compilers(dep_pkg, request, all_deps)
- if package_id(dep_pkg) not in self.build_tasks:
- self._push_task(dep_pkg, False, 0, 0, STATUS_ADDED)
+ dep_id = package_id(dep_pkg)
+ if dep_id not in self.build_tasks:
+ self._add_init_task(dep_pkg, request, False, all_deps)
# Clear any persistent failure markings _unless_ they are
# associated with another process in this parallel build
@@ -1033,21 +1036,26 @@ class PackageInstaller(object):
# Push any missing compilers (if requested) as part of the
# package dependencies.
if install_compilers:
- self._add_bootstrap_compilers(self.pkg)
+ self._add_bootstrap_compilers(request.pkg, request, all_deps)
- if install_package and self.pkg_id not in self.build_tasks:
+ install_package = request.install_args.get('install_package')
+ if install_package and request.pkg_id not in self.build_tasks:
# Be sure to clear any previous failure
- spack.store.db.clear_failure(self.pkg.spec, force=True)
+ spack.store.db.clear_failure(request.spec, force=True)
# If not installing dependencies, then determine their
# installation status before proceeding
if not install_deps:
- self._check_deps_status()
+ self._check_deps_status(request)
# Now add the package itself, if appropriate
- self._push_task(self.pkg, False, 0, 0, STATUS_ADDED)
+ self._add_init_task(request.pkg, request, False, all_deps)
- def _install_task(self, task, **kwargs):
+ # Ensure if one request is to fail fast then all requests will.
+ fail_fast = request.install_args.get('fail_fast')
+ self.fail_fast = self.fail_fast or fail_fast
+
+ def _install_task(self, task):
"""
Perform the installation of the requested spec and/or dependency
represented by the build task.
@@ -1055,15 +1063,15 @@ class PackageInstaller(object):
Args:
task (BuildTask): the installation build task for a package"""
- cache_only = kwargs.get('cache_only', False)
- tests = kwargs.get('tests', False)
- unsigned = kwargs.get('unsigned', False)
- use_cache = kwargs.get('use_cache', True)
- full_hash_match = kwargs.get('full_hash_match', False)
+ install_args = task.request.install_args
+ cache_only = install_args.get('cache_only')
+ explicit = task.explicit
+ full_hash_match = install_args.get('full_hash_match')
+ tests = install_args.get('tests')
+ unsigned = install_args.get('unsigned')
+ use_cache = install_args.get('use_cache')
- pkg = task.pkg
- pkg_id = package_id(pkg)
- explicit = pkg_id == self.pkg_id
+ pkg, pkg_id = task.pkg, task.pkg_id
tty.msg(install_msg(pkg_id, self.pid))
task.start = task.start or time.time()
@@ -1093,7 +1101,7 @@ class PackageInstaller(object):
# Preserve verbosity settings across installs.
spack.package.PackageBase._verbose = (
spack.build_environment.start_build_process(
- pkg, build_process, kwargs)
+ pkg, build_process, install_args)
)
# Note: PARENT of the build process adds the new package to
@@ -1113,8 +1121,6 @@ class PackageInstaller(object):
tty.debug('Package stage directory: {0}'
.format(pkg.stage.source_path))
- _install_task.__doc__ += install_args_docstring
-
def _next_is_pri0(self):
"""
Determine if the next build task has priority 0
@@ -1141,32 +1147,39 @@ class PackageInstaller(object):
return task
return None
- def _push_task(self, pkg, compiler, start, attempts, status):
+ def _push_task(self, task):
"""
- Create and push (or queue) a build task for the package.
+ Push (or queue) the specified build task for the package.
Source: Customization of "add_task" function at
docs.python.org/2/library/heapq.html
+
+ Args:
+ task (BuildTask): the installation build task for a package
"""
msg = "{0} a build task for {1} with status '{2}'"
- pkg_id = package_id(pkg)
+ skip = 'Skipping requeue of task for {0}: {1}'
- # Ensure do not (re-)queue installed or failed packages.
- assert pkg_id not in self.installed
- assert pkg_id not in self.failed
+ # Ensure do not (re-)queue installed or failed packages whose status
+ # may have been determined by a separate process.
+ if task.pkg_id in self.installed:
+ tty.debug(skip.format(task.pkg_id, 'installed'))
+ return
+
+ if task.pkg_id in self.failed:
+ tty.debug(skip.format(task.pkg_id, 'failed'))
+ return
# Remove any associated build task since its sequence will change
- self._remove_task(pkg_id)
- desc = 'Queueing' if attempts == 0 else 'Requeueing'
- tty.verbose(msg.format(desc, pkg_id, status))
+ self._remove_task(task.pkg_id)
+ desc = 'Queueing' if task.attempts == 0 else 'Requeueing'
+ tty.verbose(msg.format(desc, task.pkg_id, task.status))
# Now add the new task to the queue with a new sequence number to
# ensure it is the last entry popped with the same priority. This
# is necessary in case we are re-queueing a task whose priority
# was decremented due to the installation of one of its dependencies.
- task = BuildTask(pkg, compiler, start, attempts, status,
- self.installed)
- self.build_tasks[pkg_id] = task
+ self.build_tasks[task.pkg_id] = task
heapq.heappush(self.build_pq, (task.key, task))
def _release_lock(self, pkg_id):
@@ -1221,16 +1234,16 @@ class PackageInstaller(object):
tty.msg('{0} {1}'.format(install_msg(task.pkg_id, self.pid),
'in progress by another process'))
- start = task.start or time.time()
- self._push_task(task.pkg, task.compiler, start, task.attempts,
- STATUS_INSTALLING)
+ new_task = task.next_attempt(self.installed)
+ new_task.status = STATUS_INSTALLING
+ self._push_task(new_task)
def _setup_install_dir(self, pkg):
"""
Create and ensure proper access controls for the install directory.
Args:
- pkg (Package): the package to be installed and built
+ pkg (Package): the package to be built and installed
"""
if not os.path.exists(pkg.spec.prefix):
tty.verbose('Creating the installation directory {0}'
@@ -1268,7 +1281,7 @@ class PackageInstaller(object):
err = '' if exc is None else ': {0}'.format(str(exc))
tty.debug('Flagging {0} as failed{1}'.format(pkg_id, err))
if mark:
- self.failed[pkg_id] = spack.store.db.mark_failed(task.spec)
+ self.failed[pkg_id] = spack.store.db.mark_failed(task.pkg.spec)
else:
self.failed[pkg_id] = None
task.status = STATUS_FAILED
@@ -1288,77 +1301,90 @@ class PackageInstaller(object):
def _update_installed(self, task):
"""
- Mark the task's spec as installed and update the dependencies of its
- dependents.
+ Mark the task as installed and ensure dependent build tasks are aware.
Args:
task (BuildTask): the build task for the installed package
"""
- pkg_id = task.pkg_id
+ task.status = STATUS_INSTALLED
+ self._flag_installed(task.pkg, task.dependents)
+
+ def _flag_installed(self, pkg, dependent_ids=None):
+ """
+ Flag the package as installed and ensure it is known to the build
+ tasks of all known dependents.
+
+ Args:
+ pkg (Package): Package that has been installed locally, externally
+ or upstream
+ dependent_ids (list of str or None): list of the package's
+ dependent ids, or None if the dependent ids are limited to
+ those maintained in the package (dependency DAG)
+ """
+ pkg_id = package_id(pkg)
+
+ if pkg_id in self.installed:
+ # Already determined the package has been installed
+ return
+
tty.debug('Flagging {0} as installed'.format(pkg_id))
self.installed.add(pkg_id)
- task.status = STATUS_INSTALLED
- for dep_id in task.dependents:
+
+ # Update affected dependents
+ dependent_ids = dependent_ids or get_dependent_ids(pkg.spec)
+ for dep_id in set(dependent_ids):
tty.debug('Removing {0} from {1}\'s uninstalled dependencies.'
.format(pkg_id, dep_id))
if dep_id in self.build_tasks:
# Ensure the dependent's uninstalled dependencies are
# up-to-date. This will require requeueing the task.
dep_task = self.build_tasks[dep_id]
- dep_task.flag_installed(self.installed)
- self._push_task(dep_task.pkg, dep_task.compiler,
- dep_task.start, dep_task.attempts,
- dep_task.status)
+ self._push_task(dep_task.next_attempt(self.installed))
else:
tty.debug('{0} has no build task to update for {1}\'s success'
.format(dep_id, pkg_id))
- def install(self, **kwargs):
- """
- Install the package and/or associated dependencies.
-
- Args:"""
-
- fail_fast = kwargs.get('fail_fast', False)
- install_deps = kwargs.get('install_deps', True)
- keep_prefix = kwargs.get('keep_prefix', False)
- keep_stage = kwargs.get('keep_stage', False)
- restage = kwargs.get('restage', False)
-
- fail_fast_err = 'Terminating after first install failure'
+ def _init_queue(self):
+ """Initialize the build queue from the list of build requests."""
+ all_dependencies = defaultdict(set)
- # install_package defaults True and is popped so that dependencies are
- # always installed regardless of whether the root was installed
- install_package = kwargs.pop('install_package', True)
+ tty.debug('Initializing the build queue from the build requests')
+ for request in self.build_requests:
+ self._add_tasks(request, all_dependencies)
- # take a timestamp with the overwrite argument to check whether another
- # process has already overridden the package.
- self.overwrite = set(kwargs.get('overwrite', []))
- if self.overwrite:
- self.overwrite_time = time.time()
+ # Add any missing dependents to ensure proper uninstalled dependency
+ # tracking when installing multiple specs
+ tty.debug('Ensure all dependencies know all dependents across specs')
+ for dep_id in all_dependencies:
+ if dep_id in self.build_tasks:
+ dependents = all_dependencies[dep_id]
+ task = self.build_tasks[dep_id]
+ for dependent_id in dependents.difference(task.dependents):
+ task.add_dependent(dependent_id)
- # Ensure not attempting to perform an installation when user didn't
- # want to go that far.
- self._check_last_phase(**kwargs)
+ def install(self):
+ """
+ Install the requested package(s) and/or associated dependencies.
- # Skip out early if the spec is not being installed locally (i.e., if
- # external or upstream).
- not_local = _handle_external_and_upstream(self.pkg, True)
- if not_local:
- return
+ Raises:
+ InstallError: if an explicit spec fails or cannot proceed"""
+ self._init_queue()
- # Initialize the build task queue
- self._init_queue(install_deps, install_package)
+ fail_fast_err = 'Terminating after first install failure'
+ single_explicit_spec = len(self.build_requests) == 1
+ failed_explicits = []
+ exists_errors = []
- # Proceed with the installation
while self.build_pq:
task = self._pop_task()
if task is None:
continue
- pkg, spec = task.pkg, task.pkg.spec
- pkg_id = package_id(pkg)
+ install_args = task.request.install_args
+ keep_prefix = install_args.get('keep_prefix')
+
+ pkg, pkg_id, spec = task.pkg, task.pkg_id, task.pkg.spec
tty.verbose('Processing {0}: task={1}'.format(pkg_id, task))
# Ensure that the current spec has NO uninstalled dependencies,
@@ -1372,6 +1398,11 @@ class PackageInstaller(object):
if task.priority != 0:
tty.error('Detected uninstalled dependencies for {0}: {1}'
.format(pkg_id, task.uninstalled_deps))
+ left = [dep_id for dep_id in task.uninstalled_deps if
+ dep_id not in self.installed]
+ if not left:
+ tty.warn('{0} does NOT actually have any uninstalled deps'
+ ' left'.format(pkg_id))
dep_str = 'dependencies' if task.priority > 1 else 'dependency'
raise InstallError(
'Cannot proceed with {0}: {1} uninstalled {2}: {3}'
@@ -1381,11 +1412,9 @@ class PackageInstaller(object):
# Skip the installation if the spec is not being installed locally
# (i.e., if external or upstream) BUT flag it as installed since
# some package likely depends on it.
- if pkg_id != self.pkg_id:
- not_local = _handle_external_and_upstream(pkg, False)
- if not_local:
- self._update_installed(task)
- _print_installed_pkg(pkg.prefix)
+ if not task.explicit:
+ if _handle_external_and_upstream(pkg, False):
+ self._flag_installed(pkg, task.dependents)
continue
# Flag a failed spec. Do not need an (install) prefix lock since
@@ -1394,7 +1423,7 @@ class PackageInstaller(object):
tty.warn('{0} failed to install'.format(pkg_id))
self._update_failed(task)
- if fail_fast:
+ if self.fail_fast:
raise InstallError(fail_fast_err)
continue
@@ -1417,9 +1446,13 @@ class PackageInstaller(object):
self._requeue_task(task)
continue
+ # Take a timestamp with the overwrite argument to allow checking
+ # whether another process has already overridden the package.
+ if task.request.overwrite and task.explicit:
+ task.request.overwrite_time = time.time()
+
# Determine state of installation artifacts and adjust accordingly.
- self._prepare_for_install(task, keep_prefix, keep_stage,
- restage)
+ self._prepare_for_install(task)
# Flag an already installed package
if pkg_id in self.installed:
@@ -1464,23 +1497,24 @@ class PackageInstaller(object):
# Proceed with the installation since we have an exclusive write
# lock on the package.
try:
- if pkg.spec.dag_hash() in self.overwrite:
+ if pkg.spec.dag_hash() in task.request.overwrite:
rec, _ = self._check_db(pkg.spec)
if rec and rec.installed:
- if rec.installation_time < self.overwrite_time:
+ if rec.installation_time < task.request.overwrite_time:
# If it's actually overwriting, do a fs transaction
if os.path.exists(rec.path):
with fs.replace_directory_transaction(
rec.path):
- self._install_task(task, **kwargs)
+ self._install_task(task)
else:
tty.debug("Missing installation to overwrite")
- self._install_task(task, **kwargs)
+ self._install_task(task)
else:
# overwriting nothing
- self._install_task(task, **kwargs)
+ self._install_task(task)
else:
- self._install_task(task, **kwargs)
+ self._install_task(task)
+
self._update_installed(task)
# If we installed then we should keep the prefix
@@ -1489,41 +1523,55 @@ class PackageInstaller(object):
keep_prefix = keep_prefix or \
(stop_before_phase is None and last_phase is None)
- except spack.directory_layout.InstallDirectoryAlreadyExistsError:
- tty.debug("Keeping existing install prefix in place.")
+ except spack.directory_layout.InstallDirectoryAlreadyExistsError \
+ as err:
+ tty.debug('Install prefix for {0} exists, keeping {1} in '
+ 'place.'.format(pkg.name, pkg.prefix))
self._update_installed(task)
- raise
+
+ # Only terminate at this point if a single build request was
+ # made.
+ if task.explicit and single_explicit_spec:
+ raise
+
+ if task.explicit:
+ exists_errors.append((pkg_id, str(err)))
except KeyboardInterrupt as exc:
- # The build has been terminated with a Ctrl-C so terminate.
+ # The build has been terminated with a Ctrl-C so terminate
+ # regardless of the number of remaining specs.
err = 'Failed to install {0} due to {1}: {2}'
tty.error(err.format(pkg.name, exc.__class__.__name__,
str(exc)))
raise
except (Exception, SystemExit) as exc:
+ self._update_failed(task, True, exc)
+
# Best effort installs suppress the exception and mark the
- # package as a failure UNLESS this is the explicit package.
+ # package as a failure.
if (not isinstance(exc, spack.error.SpackError) or
not exc.printed):
# SpackErrors can be printed by the build process or at
# lower levels -- skip printing if already printed.
- # TODO: sort out this and SpackEror.print_context()
- err = 'Failed to install {0} due to {1}: {2}'
- tty.error(
- err.format(pkg.name, exc.__class__.__name__, str(exc)))
-
- self._update_failed(task, True, exc)
-
- if fail_fast:
- # The user requested the installation to terminate on
- # failure.
+ # TODO: sort out this and SpackError.print_context()
+ tty.error('Failed to install {0} due to {1}: {2}'
+ .format(pkg.name, exc.__class__.__name__,
+ str(exc)))
+ # Terminate if requested to do so on the first failure.
+ if self.fail_fast:
raise InstallError('{0}: {1}'
.format(fail_fast_err, str(exc)))
- if pkg_id == self.pkg_id:
+ # Terminate at this point if the single explicit spec has
+ # failed to install.
+ if single_explicit_spec and task.explicit:
raise
+ # Track explicit spec id and error to summarize when done
+ if task.explicit:
+ failed_explicits.append((pkg_id, str(exc)))
+
finally:
# Remove the install prefix if anything went wrong during
# install.
@@ -1542,20 +1590,16 @@ class PackageInstaller(object):
# Cleanup, which includes releasing all of the read locks
self._cleanup_all_tasks()
- # Ensure we properly report if the original/explicit pkg is failed
- if self.pkg_id in self.failed:
- msg = ('Installation of {0} failed. Review log for details'
- .format(self.pkg_id))
- raise InstallError(msg)
+ # Ensure we properly report if one or more explicit specs failed
+ if exists_errors or failed_explicits:
+ for pkg_id, err in exists_errors:
+ tty.error('{0}: {1}'.format(pkg_id, err))
- install.__doc__ += install_args_docstring
+ for pkg_id, err in failed_explicits:
+ tty.error('{0}: {1}'.format(pkg_id, err))
- # Helper method to "smooth" the transition from the
- # spack.package.PackageBase class
- @property
- def spec(self):
- """The specification associated with the package."""
- return self.pkg.spec
+ raise InstallError('Installation request failed. Refer to '
+ 'recent errors for specific package(s).')
def build_process(pkg, kwargs):
@@ -1672,14 +1716,17 @@ def build_process(pkg, kwargs):
class BuildTask(object):
"""Class for representing the build task for a package."""
- def __init__(self, pkg, compiler, start, attempts, status, installed):
+ def __init__(self, pkg, request, compiler, start, attempts, status,
+ installed):
"""
Instantiate a build task for a package.
Args:
- pkg (Package): the package to be installed and built
- compiler (bool): ``True`` if the task is for a bootstrap compiler,
- otherwise, ``False``
+ pkg (Package): the package to be built and installed
+ request (BuildRequest or None): the associated install request
+ where ``None`` can be used to indicate the package was
+ explicitly requested by the user
+ compiler (bool): whether task is for a bootstrap compiler
start (int): the initial start time for the package, in seconds
attempts (int): the number of attempts to install the package
status (str): the installation status
@@ -1699,6 +1746,12 @@ class BuildTask(object):
# The "unique" identifier for the task's package
self.pkg_id = package_id(self.pkg)
+ # The explicit build request associated with the package
+ if not isinstance(request, BuildRequest):
+ raise ValueError("{0} must have a build request".format(str(pkg)))
+
+ self.request = request
+
# Initialize the status to an active state. The status is used to
# ensure priority queue invariants when tasks are "removed" from the
# queue.
@@ -1714,28 +1767,26 @@ class BuildTask(object):
# The initial start time for processing the spec
self.start = start
- # Number of times the task has been queued
- self.attempts = attempts + 1
-
- # Set of dependents
- self.dependents = set(package_id(d.package) for d
- in self.spec.dependents())
+ # Set of dependents, which needs to include the requesting package
+ # to support tracking of parallel, multi-spec, environment installs.
+ self.dependents = set(get_dependent_ids(self.pkg.spec))
# Set of dependencies
#
# Be consistent wrt use of dependents and dependencies. That is,
# if use traverse for transitive dependencies, then must remove
# transitive dependents on failure.
+ deptypes = self.request.get_deptypes(self.pkg)
self.dependencies = set(package_id(d.package) for d in
- self.spec.dependencies() if
- package_id(d.package) != self.pkg_id)
+ self.pkg.spec.dependencies(deptype=deptypes)
+ if package_id(d.package) != self.pkg_id)
# Handle bootstrapped compiler
#
# The bootstrapped compiler is not a dependency in the spec, but it is
# a dependency of the build task. Here we add it to self.dependencies
- compiler_spec = self.spec.compiler
- arch_spec = self.spec.architecture
+ compiler_spec = self.pkg.spec.compiler
+ arch_spec = self.pkg.spec.architecture
if not spack.compilers.compilers_for_spec(compiler_spec,
arch_spec=arch_spec):
# The compiler is in the queue, identify it as dependency
@@ -1751,9 +1802,27 @@ class BuildTask(object):
self.uninstalled_deps = set(pkg_id for pkg_id in self.dependencies if
pkg_id not in installed)
- # Ensure the task gets a unique sequence number to preserve the
- # order in which it was added.
- self.sequence = next(_counter)
+ # Ensure key sequence-related properties are updated accordingly.
+ self.attempts = 0
+ self._update()
+
+ def __eq__(self, other):
+ return self.key == other.key
+
+ def __ge__(self, other):
+ return self.key >= other.key
+
+ def __gt__(self, other):
+ return self.key > other.key
+
+ def __le__(self, other):
+ return self.key <= other.key
+
+ def __lt__(self, other):
+ return self.key < other.key
+
+ def __ne__(self, other):
+ return self.key != other.key
def __repr__(self):
"""Returns a formal representation of the build task."""
@@ -1768,6 +1837,28 @@ class BuildTask(object):
return ('priority={0}, status={1}, start={2}, {3}'
.format(self.priority, self.status, self.start, dependencies))
+ def _update(self):
+ """Update properties associated with a new instance of a task."""
+ # Number of times the task has/will be queued
+ self.attempts = self.attempts + 1
+
+ # Ensure the task gets a unique sequence number to preserve the
+ # order in which it is added.
+ self.sequence = next(_counter)
+
+ def add_dependent(self, pkg_id):
+ """
+ Ensure the dependent package id is in the task's list so it will be
+ properly updated when this package is installed.
+
+ Args:
+ pkg_id (str): package identifier of the dependent package
+ """
+ if pkg_id != self.pkg_id and pkg_id not in self.dependents:
+ tty.debug('Adding {0} as a dependent of {1}'
+ .format(pkg_id, self.pkg_id))
+ self.dependents.add(pkg_id)
+
def flag_installed(self, installed):
"""
Ensure the dependency is not considered to still be uninstalled.
@@ -1783,20 +1874,162 @@ class BuildTask(object):
.format(self.pkg_id, pkg_id, self.uninstalled_deps))
@property
+ def explicit(self):
+ """The package was explicitly requested by the user."""
+ return self.pkg == self.request.pkg
+
+ @property
def key(self):
"""The key is the tuple (# uninstalled dependencies, sequence)."""
return (self.priority, self.sequence)
+ def next_attempt(self, installed):
+ """Create a new, updated task for the next installation attempt."""
+ task = copy.copy(self)
+ task._update()
+ task.start = self.start or time.time()
+ task.flag_installed(installed)
+ return task
+
@property
def priority(self):
"""The priority is based on the remaining uninstalled dependencies."""
return len(self.uninstalled_deps)
+
+class BuildRequest(object):
+ """Class for representing an installation request."""
+
+ def __init__(self, pkg, install_args):
+ """
+ Instantiate a build request for a package.
+
+ Args:
+ pkg (Package): the package to be built and installed
+ install_args (dict): the install arguments associated with ``pkg``
+ """
+ # Ensure dealing with a package that has a concrete spec
+ if not isinstance(pkg, spack.package.PackageBase):
+ raise ValueError("{0} must be a package".format(str(pkg)))
+
+ self.pkg = pkg
+ if not self.pkg.spec.concrete:
+ raise ValueError("{0} must have a concrete spec"
+ .format(self.pkg.name))
+
+ # Cache the package phase options with the explicit package,
+ # popping the options to ensure installation of associated
+ # dependencies is NOT affected by these options.
+ self.pkg.stop_before_phase = install_args.pop('stop_before', None)
+ self.pkg.last_phase = install_args.pop('stop_at', None)
+
+ # Cache the package id for convenience
+ self.pkg_id = package_id(pkg)
+
+ # Save off the original install arguments plus standard defaults
+ # since they apply to the requested package *and* dependencies.
+ self.install_args = install_args if install_args else {}
+ self._add_default_args()
+
+ # Cache overwrite information
+ self.overwrite = set(self.install_args.get('overwrite', []))
+ self.overwrite_time = time.time()
+
+ # Save off dependency package ids for quick checks since traversals
+ # are not able to return full dependents for all packages across
+ # environment specs.
+ deptypes = self.get_deptypes(self.pkg)
+ self.dependencies = set(package_id(d.package) for d in
+ self.pkg.spec.dependencies(deptype=deptypes)
+ if package_id(d.package) != self.pkg_id)
+
+ def __repr__(self):
+ """Returns a formal representation of the build request."""
+ rep = '{0}('.format(self.__class__.__name__)
+ for attr, value in self.__dict__.items():
+ rep += '{0}={1}, '.format(attr, value.__repr__())
+ return '{0})'.format(rep.strip(', '))
+
+ def __str__(self):
+ """Returns a printable version of the build request."""
+ return 'package={0}, install_args={1}' \
+ .format(self.pkg.name, self.install_args)
+
+ def _add_default_args(self):
+ """Ensure standard install options are set to at least the default."""
+ for arg, default in [('cache_only', False),
+ ('dirty', False),
+ ('fail_fast', False),
+ ('fake', False),
+ ('full_hash_match', False),
+ ('install_deps', True),
+ ('install_package', True),
+ ('install_source', False),
+ ('keep_prefix', False),
+ ('keep_stage', False),
+ ('restage', False),
+ ('skip_patch', False),
+ ('tests', False),
+ ('unsigned', False),
+ ('use_cache', True),
+ ('verbose', False), ]:
+ _ = self.install_args.setdefault(arg, default)
+
+ def get_deptypes(self, pkg):
+ """Determine the required dependency types for the associated package.
+
+ Args:
+ pkg (PackageBase): explicit or implicit package being installed
+
+ Returns:
+ (tuple) required dependency type(s) for the package
+ """
+ deptypes = ['link', 'run']
+ if not self.install_args.get('cache_only'):
+ deptypes.append('build')
+ if self.run_tests(pkg):
+ deptypes.append('test')
+ return tuple(sorted(deptypes))
+
+ def has_dependency(self, dep_id):
+ """Returns ``True`` if the package id represents a known dependency
+ of the requested package, ``False`` otherwise."""
+ return dep_id in self.dependencies
+
+ def run_tests(self, pkg):
+ """Determine if the tests should be run for the provided package.
+
+ Args:
+ pkg (PackageBase): explicit or implicit package being installed
+
+ Returns:
+ (bool) ``True`` if they should be run; ``False`` otherwise
+ """
+ tests = self.install_args.get('tests', False)
+ return tests is True or (tests and pkg.name in tests)
+
@property
def spec(self):
"""The specification associated with the package."""
return self.pkg.spec
+ def traverse_dependencies(self):
+ """
+ Yield any dependencies of the appropriate type(s)
+
+ Yields:
+ (Spec) The next child spec in the DAG
+ """
+ get_spec = lambda s: s.spec
+
+ deptypes = self.get_deptypes(self.pkg)
+ tty.debug('Processing dependencies for {0}: {1}'
+ .format(self.pkg_id, deptypes))
+ for dspec in self.spec.traverse_edges(
+ deptype=deptypes, order='post', root=False,
+ direction='children'):
+ yield get_spec(dspec)
+
class InstallError(spack.error.SpackError):
"""Raised when something goes wrong during install or uninstall."""
@@ -1805,6 +2038,15 @@ class InstallError(spack.error.SpackError):
super(InstallError, self).__init__(message, long_msg)
+class BadInstallPhase(InstallError):
+ """Raised when an install phase option is not allowed for a package."""
+
+ def __init__(self, pkg_name, phase):
+ super(BadInstallPhase, self).__init__(
+ '\'{0}\' is not a valid phase for package {1}'
+ .format(phase, pkg_name))
+
+
class ExternalPackageError(InstallError):
"""Raised by install() when a package is only for external use."""
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index a692d04052..725e1a69d3 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -53,8 +53,7 @@ from six import StringIO
from six import string_types
from six import with_metaclass
from spack.filesystem_view import YamlFilesystemView
-from spack.installer import \
- install_args_docstring, PackageInstaller, InstallError
+from spack.installer import PackageInstaller, InstallError
from spack.stage import stage_prefix, Stage, ResourceStage, StageComposite
from spack.util.package_hash import package_hash
from spack.version import Version
@@ -1591,17 +1590,45 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
Package implementations should override install() to describe
their build process.
- Args:"""
+ Args:
+ cache_only (bool): Fail if binary package unavailable.
+ dirty (bool): Don't clean the build environment before installing.
+ explicit (bool): True if package was explicitly installed, False
+ if package was implicitly installed (as a dependency).
+ fail_fast (bool): Fail if any dependency fails to install;
+ otherwise, the default is to install as many dependencies as
+ possible (i.e., best effort installation).
+ fake (bool): Don't really build; install fake stub files instead.
+ force (bool): Install again, even if already installed.
+ install_deps (bool): Install dependencies before installing this
+ package
+ install_source (bool): By default, source is not installed, but
+ for debugging it might be useful to keep it around.
+ keep_prefix (bool): Keep install prefix on failure. By default,
+ destroys it.
+ keep_stage (bool): By default, stage is destroyed only if there
+ are no exceptions during build. Set to True to keep the stage
+ even with exceptions.
+ restage (bool): Force spack to restage the package source.
+ skip_patch (bool): Skip patch stage of build if True.
+ stop_before (InstallPhase): stop execution before this
+ installation phase (or None)
+ stop_at (InstallPhase): last installation phase to be executed
+ (or None)
+ tests (bool or list or set): False to run no tests, True to test
+ all packages, or a list of package names to run tests for some
+ use_cache (bool): Install from binary package, if available.
+ verbose (bool): Display verbose build output (by default,
+ suppresses it)
+ """
# Non-transitive dev specs need to keep the dev stage and be built from
# source every time. Transitive ones just need to be built from source.
dev_path_var = self.spec.variants.get('dev_path', None)
if dev_path_var:
kwargs['keep_stage'] = True
- builder = PackageInstaller(self)
- builder.install(**kwargs)
-
- do_install.__doc__ += install_args_docstring
+ builder = PackageInstaller([(self, kwargs)])
+ builder.install()
def unit_test_check(self):
"""Hook for unit tests to assert things about package internals.
diff --git a/lib/spack/spack/report.py b/lib/spack/spack/report.py
index e500f9fc05..4e5bc9c993 100644
--- a/lib/spack/spack/report.py
+++ b/lib/spack/spack/report.py
@@ -44,8 +44,8 @@ def fetch_package_log(pkg):
class InfoCollector(object):
- """Decorates PackageInstaller._install_task, which is called by
- PackageBase.do_install for each spec, to collect information
+ """Decorates PackageInstaller._install_task, which is called via
+ PackageBase.do_install for individual specs, to collect information
on the installation of certain specs.
When exiting the context this change will be rolled-back.
diff --git a/lib/spack/spack/test/buildrequest.py b/lib/spack/spack/test/buildrequest.py
new file mode 100644
index 0000000000..5918838edc
--- /dev/null
+++ b/lib/spack/spack/test/buildrequest.py
@@ -0,0 +1,55 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import pytest
+
+import spack.installer as inst
+import spack.repo
+import spack.spec
+
+
+def test_build_request_errors(install_mockery):
+ with pytest.raises(ValueError, match='must be a package'):
+ inst.BuildRequest('abc', {})
+
+ pkg = spack.repo.get('trivial-install-test-package')
+ with pytest.raises(ValueError, match='must have a concrete spec'):
+ inst.BuildRequest(pkg, {})
+
+
+def test_build_request_basics(install_mockery):
+ spec = spack.spec.Spec('dependent-install')
+ spec.concretize()
+ assert spec.concrete
+
+ # Ensure key properties match expectations
+ request = inst.BuildRequest(spec.package, {})
+ assert not request.pkg.stop_before_phase
+ assert not request.pkg.last_phase
+ assert request.spec == spec.package.spec
+
+ # Ensure key default install arguments are set
+ assert 'install_package' in request.install_args
+ assert 'install_deps' in request.install_args
+
+
+def test_build_request_strings(install_mockery):
+ """Tests of BuildRequest repr and str for coverage purposes."""
+ # Using a package with one dependency
+ spec = spack.spec.Spec('dependent-install')
+ spec.concretize()
+ assert spec.concrete
+
+ # Ensure key properties match expectations
+ request = inst.BuildRequest(spec.package, {})
+
+ # Cover __repr__
+ irep = request.__repr__()
+ assert irep.startswith(request.__class__.__name__)
+
+ # Cover __str__
+ istr = str(request)
+ assert "package=dependent-install" in istr
+ assert "install_args=" in istr
diff --git a/lib/spack/spack/test/buildtask.py b/lib/spack/spack/test/buildtask.py
index dcf3d29a6f..f82ff6861a 100644
--- a/lib/spack/spack/test/buildtask.py
+++ b/lib/spack/spack/test/buildtask.py
@@ -12,17 +12,22 @@ import spack.spec
def test_build_task_errors(install_mockery):
with pytest.raises(ValueError, match='must be a package'):
- inst.BuildTask('abc', False, 0, 0, 0, [])
+ inst.BuildTask('abc', None, False, 0, 0, 0, [])
pkg = spack.repo.get('trivial-install-test-package')
with pytest.raises(ValueError, match='must have a concrete spec'):
- inst.BuildTask(pkg, False, 0, 0, 0, [])
+ inst.BuildTask(pkg, None, False, 0, 0, 0, [])
spec = spack.spec.Spec('trivial-install-test-package')
spec.concretize()
assert spec.concrete
+ with pytest.raises(ValueError, match='must have a build request'):
+ inst.BuildTask(spec.package, None, False, 0, 0, 0, [])
+
+ request = inst.BuildRequest(spec.package, {})
with pytest.raises(inst.InstallError, match='Cannot create a build task'):
- inst.BuildTask(spec.package, False, 0, 0, inst.STATUS_REMOVED, [])
+ inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_REMOVED,
+ [])
def test_build_task_basics(install_mockery):
@@ -31,7 +36,10 @@ def test_build_task_basics(install_mockery):
assert spec.concrete
# Ensure key properties match expectations
- task = inst.BuildTask(spec.package, False, 0, 0, inst.STATUS_ADDED, [])
+ request = inst.BuildRequest(spec.package, {})
+ task = inst.BuildTask(spec.package, request, False, 0, 0,
+ inst.STATUS_ADDED, [])
+ assert task.explicit # package was "explicitly" requested
assert task.priority == len(task.uninstalled_deps)
assert task.key == (task.priority, task.sequence)
@@ -51,7 +59,9 @@ def test_build_task_strings(install_mockery):
assert spec.concrete
# Ensure key properties match expectations
- task = inst.BuildTask(spec.package, False, 0, 0, inst.STATUS_ADDED, [])
+ request = inst.BuildRequest(spec.package, {})
+ task = inst.BuildTask(spec.package, request, False, 0, 0,
+ inst.STATUS_ADDED, [])
# Cover __repr__
irep = task.__repr__()
diff --git a/lib/spack/spack/test/cmd/dev_build.py b/lib/spack/spack/test/cmd/dev_build.py
index f11025b7b1..30d99c8562 100644
--- a/lib/spack/spack/test/cmd/dev_build.py
+++ b/lib/spack/spack/test/cmd/dev_build.py
@@ -8,7 +8,7 @@ import pytest
import spack.spec
import llnl.util.filesystem as fs
import spack.environment as ev
-from spack.main import SpackCommand, SpackCommandError
+from spack.main import SpackCommand
dev_build = SpackCommand('dev-build')
install = SpackCommand('install')
@@ -99,13 +99,15 @@ def test_dev_build_before_until(tmpdir, mock_packages, install_mockery):
dev_build('-u', 'edit', '-b', 'edit',
'dev-build-test-install@0.0.0')
- with pytest.raises(SpackCommandError):
- dev_build('-u', 'phase_that_does_not_exist',
- 'dev-build-test-install@0.0.0')
+ bad_phase = 'phase_that_does_not_exist'
+ not_allowed = 'is not a valid phase'
+ out = dev_build('-u', bad_phase, 'dev-build-test-install@0.0.0')
+ assert bad_phase in out
+ assert not_allowed in out
- with pytest.raises(SpackCommandError):
- dev_build('-b', 'phase_that_does_not_exist',
- 'dev-build-test-install@0.0.0')
+ out = dev_build('-b', bad_phase, 'dev-build-test-install@0.0.0')
+ assert bad_phase in out
+ assert not_allowed in out
def print_spack_cc(*args):
diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py
index 1a6501440d..5fbd5f2c85 100644
--- a/lib/spack/spack/test/cmd/env.py
+++ b/lib/spack/spack/test/cmd/env.py
@@ -185,17 +185,53 @@ def test_env_modifications_error_on_activate(
assert "Warning: couldn't get environment settings" in err
-def test_env_install_same_spec_twice(install_mockery, mock_fetch, capfd):
+def test_env_install_same_spec_twice(install_mockery, mock_fetch):
env('create', 'test')
e = ev.read('test')
- with capfd.disabled():
- with e:
- # The first installation outputs the package prefix
- install('cmake-client')
- # The second installation attempt will also update the view
- out = install('cmake-client')
- assert 'Updating view at' in out
+ with e:
+ # The first installation outputs the package prefix, updates the view
+ out = install('cmake-client')
+ assert 'Updating view at' in out
+
+ # The second installation reports all packages already installed
+ out = install('cmake-client')
+ assert 'already installed' in out
+
+
+def test_env_install_two_specs_same_dep(
+ install_mockery, mock_fetch, tmpdir, capsys):
+ """Test installation of two packages that share a dependency with no
+ connection and the second specifying the dependency as a 'build'
+ dependency.
+ """
+ path = tmpdir.join('spack.yaml')
+
+ with tmpdir.as_cwd():
+ with open(str(path), 'w') as f:
+ f.write("""\
+env:
+ specs:
+ - a
+ - depb
+""")
+
+ env('create', 'test', 'spack.yaml')
+
+ with ev.read('test'):
+ with capsys.disabled():
+ out = install()
+
+ # Ensure both packages reach install phase processing and are installed
+ out = str(out)
+ assert 'depb: Executing phase:' in out
+ assert 'a: Executing phase:' in out
+
+ depb = spack.repo.path.get_pkg_class('depb')
+ assert depb.installed, 'Expected depb to be installed'
+
+ a = spack.repo.path.get_pkg_class('a')
+ assert a.installed, 'Expected a to be installed'
def test_remove_after_concretize():
@@ -2094,7 +2130,8 @@ def test_cant_install_single_spec_when_concretizing_together():
e.concretization = 'together'
with pytest.raises(ev.SpackEnvironmentError, match=r'cannot install'):
- e.install('zlib')
+ e.concretize_and_add('zlib')
+ e.install_all()
def test_duplicate_packages_raise_when_concretizing_together():
diff --git a/lib/spack/spack/test/cmd/install.py b/lib/spack/spack/test/cmd/install.py
index 2d7571ef4b..0ba8e6a7c6 100644
--- a/lib/spack/spack/test/cmd/install.py
+++ b/lib/spack/spack/test/cmd/install.py
@@ -38,7 +38,7 @@ buildcache = SpackCommand('buildcache')
def noop_install(monkeypatch):
def noop(*args, **kwargs):
pass
- monkeypatch.setattr(spack.package.PackageBase, 'do_install', noop)
+ monkeypatch.setattr(spack.installer.PackageInstaller, 'install', noop)
def test_install_package_and_dependency(
@@ -321,15 +321,19 @@ def test_install_from_file(spec, concretize, error_code, tmpdir):
with specfile.open('w') as f:
spec.to_yaml(f)
+ err_msg = 'does not contain a concrete spec' if error_code else ''
+
# Relative path to specfile (regression for #6906)
with fs.working_dir(specfile.dirname):
# A non-concrete spec will fail to be installed
- install('-f', specfile.basename, fail_on_error=False)
+ out = install('-f', specfile.basename, fail_on_error=False)
assert install.returncode == error_code
+ assert err_msg in out
# Absolute path to specfile (regression for #6983)
- install('-f', str(specfile), fail_on_error=False)
+ out = install('-f', str(specfile), fail_on_error=False)
assert install.returncode == error_code
+ assert err_msg in out
@pytest.mark.disable_clean_stage_check
@@ -615,12 +619,14 @@ def test_build_warning_output(tmpdir, mock_fetch, install_mockery, capfd):
assert 'foo.c:89: warning: some weird warning!' in msg
-def test_cache_only_fails(tmpdir, mock_fetch, install_mockery):
+def test_cache_only_fails(tmpdir, mock_fetch, install_mockery, capfd):
# libelf from cache fails to install, which automatically removes the
- # the libdwarf build task and flags the package as failed to install.
- err_msg = 'Installation of libdwarf failed'
- with pytest.raises(spack.installer.InstallError, match=err_msg):
- install('--cache-only', 'libdwarf')
+    # libdwarf build task
+ with capfd.disabled():
+ out = install('--cache-only', 'libdwarf')
+
+ assert 'Failed to install libelf' in out
+ assert 'Skipping build of libdwarf' in out
# Check that failure prefix locks are still cached
failure_lock_prefixes = ','.join(spack.store.db._prefix_failures.keys())
@@ -828,6 +834,7 @@ def test_cache_install_full_hash_match(
mirror_url = 'file://{0}'.format(mirror_dir.strpath)
s = Spec('libdwarf').concretized()
+ package_id = spack.installer.package_id(s.package)
# Install a package
install(s.name)
@@ -843,9 +850,9 @@ def test_cache_install_full_hash_match(
# Make sure we get the binary version by default
install_output = install('--no-check-signature', s.name, output=str)
- expect_msg = 'Extracting {0} from binary cache'.format(s.name)
+ expect_extract_msg = 'Extracting {0} from binary cache'.format(package_id)
- assert expect_msg in install_output
+ assert expect_extract_msg in install_output
uninstall('-y', s.name)
@@ -855,9 +862,7 @@ def test_cache_install_full_hash_match(
# Check that even if the full hash changes, we install from binary when
# we don't explicitly require the full hash to match
install_output = install('--no-check-signature', s.name, output=str)
- expect_msg = 'Extracting {0} from binary cache'.format(s.name)
-
- assert expect_msg in install_output
+ assert expect_extract_msg in install_output
uninstall('-y', s.name)
@@ -865,7 +870,7 @@ def test_cache_install_full_hash_match(
# installs from source.
install_output = install('--require-full-hash-match', s.name, output=str)
expect_msg = 'No binary for {0} found: installing from source'.format(
- s.name)
+ package_id)
assert expect_msg in install_output
diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py
index 61475e4dc6..c2a8d0b100 100644
--- a/lib/spack/spack/test/install.py
+++ b/lib/spack/spack/test/install.py
@@ -507,7 +507,7 @@ def test_unconcretized_install(install_mockery, mock_fetch, mock_packages):
"""Test attempts to perform install phases with unconcretized spec."""
spec = Spec('trivial-install-test-package')
- with pytest.raises(ValueError, match="only install concrete packages"):
+ with pytest.raises(ValueError, match='must have a concrete spec'):
spec.package.do_install()
with pytest.raises(ValueError, match="only patch concrete packages"):
diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py
index 99f0af709c..cec3721d0d 100644
--- a/lib/spack/spack/test/installer.py
+++ b/lib/spack/spack/test/installer.py
@@ -59,34 +59,52 @@ def _true(*args, **kwargs):
return True
-def create_build_task(pkg):
+def create_build_task(pkg, install_args={}):
"""
     Create a build task for the given (concretized) package
Args:
pkg (PackageBase): concretized package associated with the task
+ install_args (dict): dictionary of kwargs (or install args)
Return:
(BuildTask) A basic package build task
"""
- return inst.BuildTask(pkg, False, 0, 0, inst.STATUS_ADDED, [])
+ request = inst.BuildRequest(pkg, install_args)
+ return inst.BuildTask(pkg, request, False, 0, 0, inst.STATUS_ADDED, [])
-def create_installer(spec_name):
+def create_installer(installer_args):
"""
- Create an installer for the named spec
+ Create an installer using the concretized spec for each arg
Args:
- spec_name (str): Name of the explicit install spec
+ installer_args (list of tuples): the list of (spec name, kwargs) tuples
Return:
- spec (Spec): concretized spec
installer (PackageInstaller): the associated package installer
"""
- spec = spack.spec.Spec(spec_name)
- spec.concretize()
- assert spec.concrete
- return spec, inst.PackageInstaller(spec.package)
+ const_arg = [(spec.package, kwargs) for spec, kwargs in installer_args]
+ return inst.PackageInstaller(const_arg)
+
+
+def installer_args(spec_names, kwargs={}):
+    """Return the installer argument with each spec paired with kwargs
+
+ Args:
+ spec_names (list of str): list of spec names
+ kwargs (dict or None): install arguments to apply to all of the specs
+
+ Returns:
+ list of (spec, kwargs): the installer constructor argument
+ """
+ arg = []
+ for name in spec_names:
+ spec = spack.spec.Spec(name)
+ spec.concretize()
+ assert spec.concrete
+ arg.append((spec, kwargs))
+ return arg
@pytest.mark.parametrize('sec,result', [
@@ -99,6 +117,21 @@ def test_hms(sec, result):
assert inst._hms(sec) == result
+def test_get_dependent_ids(install_mockery, mock_packages):
+    # Concretize the parent package, which handles its dependency too
+ spec = spack.spec.Spec('a')
+ spec.concretize()
+ assert spec.concrete
+
+ pkg_id = inst.package_id(spec.package)
+
+ # Grab the sole dependency of 'a', which is 'b'
+ dep = spec.dependencies()[0]
+
+ # Ensure the parent package is a dependent of the dependency package
+ assert pkg_id in inst.get_dependent_ids(dep)
+
+
def test_install_msg(monkeypatch):
"""Test results of call to install_msg based on debug level."""
name = 'some-package'
@@ -190,7 +223,9 @@ def test_process_binary_cache_tarball_tar(install_mockery, monkeypatch, capfd):
spec = spack.spec.Spec('a').concretized()
assert inst._process_binary_cache_tarball(spec.package, spec, False, False)
- assert 'Extracting a from binary cache' in capfd.readouterr()[0]
+ out = capfd.readouterr()[0]
+ assert 'Extracting a' in out
+ assert 'from binary cache' in out
def test_try_install_from_binary_cache(install_mockery, mock_packages,
@@ -216,18 +251,9 @@ def test_try_install_from_binary_cache(install_mockery, mock_packages,
assert 'add a spack mirror to allow download' in str(captured)
-def test_installer_init_errors(install_mockery):
- """Test to ensure cover installer constructor errors."""
- with pytest.raises(ValueError, match='must be a package'):
- inst.PackageInstaller('abc')
-
- pkg = spack.repo.get('trivial-install-test-package')
- with pytest.raises(ValueError, match='Can only install concrete'):
- inst.PackageInstaller(pkg)
-
-
def test_installer_repr(install_mockery):
- spec, installer = create_installer('trivial-install-test-package')
+ const_arg = installer_args(['trivial-install-test-package'], {})
+ installer = create_installer(const_arg)
irep = installer.__repr__()
assert irep.startswith(installer.__class__.__name__)
@@ -236,7 +262,8 @@ def test_installer_repr(install_mockery):
def test_installer_str(install_mockery):
- spec, installer = create_installer('trivial-install-test-package')
+ const_arg = installer_args(['trivial-install-test-package'], {})
+ installer = create_installer(const_arg)
istr = str(installer)
assert "#tasks=0" in istr
@@ -244,20 +271,33 @@ def test_installer_str(install_mockery):
assert "failed (0)" in istr
-def test_installer_last_phase_error(install_mockery, capsys):
- spec = spack.spec.Spec('trivial-install-test-package')
- spec.concretize()
- assert spec.concrete
- with pytest.raises(SystemExit):
- installer = inst.PackageInstaller(spec.package)
- installer.install(stop_at='badphase')
+def test_check_before_phase_error(install_mockery):
+ pkg = spack.repo.get('trivial-install-test-package')
+ pkg.stop_before_phase = 'beforephase'
+ with pytest.raises(inst.BadInstallPhase) as exc_info:
+ inst._check_last_phase(pkg)
- captured = capsys.readouterr()
- assert 'is not an allowed phase' in str(captured)
+ err = str(exc_info.value)
+ assert 'is not a valid phase' in err
+ assert pkg.stop_before_phase in err
+
+
+def test_check_last_phase_error(install_mockery):
+ pkg = spack.repo.get('trivial-install-test-package')
+ pkg.stop_before_phase = None
+ pkg.last_phase = 'badphase'
+ with pytest.raises(inst.BadInstallPhase) as exc_info:
+ inst._check_last_phase(pkg)
+
+ err = str(exc_info.value)
+ assert 'is not a valid phase' in err
+ assert pkg.last_phase in err
def test_installer_ensure_ready_errors(install_mockery):
- spec, installer = create_installer('trivial-install-test-package')
+ const_arg = installer_args(['trivial-install-test-package'], {})
+ installer = create_installer(const_arg)
+ spec = installer.build_requests[0].pkg.spec
fmt = r'cannot be installed locally.*{0}'
# Force an external package error
@@ -290,7 +330,9 @@ def test_ensure_locked_err(install_mockery, monkeypatch, tmpdir, capsys):
def _raise(lock, timeout):
raise RuntimeError(mock_err_msg)
- spec, installer = create_installer('trivial-install-test-package')
+ const_arg = installer_args(['trivial-install-test-package'], {})
+ installer = create_installer(const_arg)
+ spec = installer.build_requests[0].pkg.spec
monkeypatch.setattr(ulk.Lock, 'acquire_read', _raise)
with tmpdir.as_cwd():
@@ -304,14 +346,17 @@ def test_ensure_locked_err(install_mockery, monkeypatch, tmpdir, capsys):
def test_ensure_locked_have(install_mockery, tmpdir, capsys):
"""Test _ensure_locked when already have lock."""
- spec, installer = create_installer('trivial-install-test-package')
+ const_arg = installer_args(['trivial-install-test-package'], {})
+ installer = create_installer(const_arg)
+ spec = installer.build_requests[0].pkg.spec
+ pkg_id = inst.package_id(spec.package)
with tmpdir.as_cwd():
# Test "downgrade" of a read lock (to a read lock)
lock = lk.Lock('./test', default_timeout=1e-9, desc='test')
lock_type = 'read'
tpl = (lock_type, lock)
- installer.locks[installer.pkg_id] = tpl
+ installer.locks[pkg_id] = tpl
assert installer._ensure_locked(lock_type, spec.package) == tpl
# Test "upgrade" of a read lock without read count to a write
@@ -341,7 +386,9 @@ def test_ensure_locked_have(install_mockery, tmpdir, capsys):
def test_ensure_locked_new_lock(
install_mockery, tmpdir, lock_type, reads, writes):
pkg_id = 'a'
- spec, installer = create_installer(pkg_id)
+ const_arg = installer_args([pkg_id], {})
+ installer = create_installer(const_arg)
+ spec = installer.build_requests[0].pkg.spec
with tmpdir.as_cwd():
ltype, lock = installer._ensure_locked(lock_type, spec.package)
assert ltype == lock_type
@@ -359,7 +406,9 @@ def test_ensure_locked_new_warn(install_mockery, monkeypatch, tmpdir, capsys):
return lock
pkg_id = 'a'
- spec, installer = create_installer(pkg_id)
+ const_arg = installer_args([pkg_id], {})
+ installer = create_installer(const_arg)
+ spec = installer.build_requests[0].pkg.spec
monkeypatch.setattr(spack.database.Database, 'prefix_lock', _pl)
@@ -529,52 +578,65 @@ def test_clear_failures_errs(install_mockery, monkeypatch, capsys):
def test_check_deps_status_install_failure(install_mockery, monkeypatch):
- spec, installer = create_installer('a')
+ const_arg = installer_args(['a'], {})
+ installer = create_installer(const_arg)
+ request = installer.build_requests[0]
# Make sure the package is identified as failed
monkeypatch.setattr(spack.database.Database, 'prefix_failed', _true)
with pytest.raises(inst.InstallError, match='install failure'):
- installer._check_deps_status()
+ installer._check_deps_status(request)
def test_check_deps_status_write_locked(install_mockery, monkeypatch):
- spec, installer = create_installer('a')
+ const_arg = installer_args(['a'], {})
+ installer = create_installer(const_arg)
+ request = installer.build_requests[0]
# Ensure the lock is not acquired
monkeypatch.setattr(inst.PackageInstaller, '_ensure_locked', _not_locked)
with pytest.raises(inst.InstallError, match='write locked by another'):
- installer._check_deps_status()
+ installer._check_deps_status(request)
def test_check_deps_status_external(install_mockery, monkeypatch):
- spec, installer = create_installer('a')
+ const_arg = installer_args(['a'], {})
+ installer = create_installer(const_arg)
+ request = installer.build_requests[0]
# Mock the known dependent, b, as external so assumed to be installed
monkeypatch.setattr(spack.spec.Spec, 'external', True)
- installer._check_deps_status()
- assert 'b' in installer.installed
+ installer._check_deps_status(request)
+ assert list(installer.installed)[0].startswith('b')
def test_check_deps_status_upstream(install_mockery, monkeypatch):
- spec, installer = create_installer('a')
+ const_arg = installer_args(['a'], {})
+ installer = create_installer(const_arg)
+ request = installer.build_requests[0]
# Mock the known dependent, b, as installed upstream
monkeypatch.setattr(spack.package.PackageBase, 'installed_upstream', True)
- installer._check_deps_status()
- assert 'b' in installer.installed
+ installer._check_deps_status(request)
+ assert list(installer.installed)[0].startswith('b')
def test_add_bootstrap_compilers(install_mockery, monkeypatch):
+ from collections import defaultdict
+
def _pkgs(pkg):
spec = spack.spec.Spec('mpi').concretized()
return [(spec.package, True)]
- spec, installer = create_installer('trivial-install-test-package')
+ const_arg = installer_args(['trivial-install-test-package'], {})
+ installer = create_installer(const_arg)
+ request = installer.build_requests[0]
+ all_deps = defaultdict(set)
monkeypatch.setattr(inst, '_packages_needed_to_bootstrap_compiler', _pkgs)
- installer._add_bootstrap_compilers(spec.package)
+ installer._add_bootstrap_compilers(request.pkg, request, all_deps)
ids = list(installer.build_tasks)
assert len(ids) == 1
@@ -584,33 +646,40 @@ def test_add_bootstrap_compilers(install_mockery, monkeypatch):
def test_prepare_for_install_on_installed(install_mockery, monkeypatch):
"""Test of _prepare_for_install's early return for installed task path."""
- spec, installer = create_installer('dependent-install')
- task = create_build_task(spec.package)
+ const_arg = installer_args(['dependent-install'], {})
+ installer = create_installer(const_arg)
+ request = installer.build_requests[0]
+
+ install_args = {'keep_prefix': True, 'keep_stage': True, 'restage': False}
+ task = create_build_task(request.pkg, install_args)
installer.installed.add(task.pkg_id)
monkeypatch.setattr(inst.PackageInstaller, '_ensure_install_ready', _noop)
- installer._prepare_for_install(task, True, True, False)
+ installer._prepare_for_install(task)
-def test_installer_init_queue(install_mockery):
- """Test of installer queue functions."""
+def test_installer_init_requests(install_mockery):
+ """Test of installer initial requests."""
+ spec_name = 'dependent-install'
with spack.config.override('config:install_missing_compilers', True):
- spec, installer = create_installer('dependent-install')
- installer._init_queue(True, True)
+ const_arg = installer_args([spec_name], {})
+ installer = create_installer(const_arg)
- ids = list(installer.build_tasks)
- assert len(ids) == 2
- assert 'dependency-install' in ids
- assert 'dependent-install' in ids
+ # There is only one explicit request in this case
+ assert len(installer.build_requests) == 1
+ request = installer.build_requests[0]
+ assert request.pkg.name == spec_name
def test_install_task_use_cache(install_mockery, monkeypatch):
- spec, installer = create_installer('trivial-install-test-package')
- task = create_build_task(spec.package)
+ const_arg = installer_args(['trivial-install-test-package'], {})
+ installer = create_installer(const_arg)
+ request = installer.build_requests[0]
+ task = create_build_task(request.pkg)
monkeypatch.setattr(inst, '_install_from_cache', _true)
installer._install_task(task)
- assert spec.package.name in installer.installed
+ assert request.pkg_id in installer.installed
def test_install_task_add_compiler(install_mockery, monkeypatch, capfd):
@@ -619,8 +688,9 @@ def test_install_task_add_compiler(install_mockery, monkeypatch, capfd):
def _add(_compilers):
tty.msg(config_msg)
- spec, installer = create_installer('a')
- task = create_build_task(spec.package)
+ const_arg = installer_args(['a'], {})
+ installer = create_installer(const_arg)
+ task = create_build_task(installer.build_requests[0].pkg)
task.compiler = True
# Preclude any meaningful side-effects
@@ -638,7 +708,8 @@ def test_install_task_add_compiler(install_mockery, monkeypatch, capfd):
def test_release_lock_write_n_exception(install_mockery, tmpdir, capsys):
"""Test _release_lock for supposed write lock with exception."""
- spec, installer = create_installer('trivial-install-test-package')
+ const_arg = installer_args(['trivial-install-test-package'], {})
+ installer = create_installer(const_arg)
pkg_id = 'test'
with tmpdir.as_cwd():
@@ -652,10 +723,30 @@ def test_release_lock_write_n_exception(install_mockery, tmpdir, capsys):
assert msg in out
+@pytest.mark.parametrize('installed', [True, False])
+def test_push_task_skip_processed(install_mockery, installed):
+    """Test to ensure we skip re-queueing a processed package."""
+ const_arg = installer_args(['a'], {})
+ installer = create_installer(const_arg)
+ assert len(list(installer.build_tasks)) == 0
+
+ # Mark the package as installed OR failed
+ task = create_build_task(installer.build_requests[0].pkg)
+ if installed:
+ installer.installed.add(task.pkg_id)
+ else:
+ installer.failed[task.pkg_id] = None
+
+ installer._push_task(task)
+
+ assert len(list(installer.build_tasks)) == 0
+
+
def test_requeue_task(install_mockery, capfd):
"""Test to ensure cover _requeue_task."""
- spec, installer = create_installer('a')
- task = create_build_task(spec.package)
+ const_arg = installer_args(['a'], {})
+ installer = create_installer(const_arg)
+ task = create_build_task(installer.build_requests[0].pkg)
installer._requeue_task(task)
@@ -663,9 +754,12 @@ def test_requeue_task(install_mockery, capfd):
assert len(ids) == 1
qtask = installer.build_tasks[ids[0]]
assert qtask.status == inst.STATUS_INSTALLING
+ assert qtask.sequence > task.sequence
+ assert qtask.attempts == task.attempts + 1
out = capfd.readouterr()[0]
- assert 'Installing a in progress by another process' in out
+ assert 'Installing a' in out
+ assert ' in progress by another process' in out
def test_cleanup_all_tasks(install_mockery, monkeypatch):
@@ -676,7 +770,9 @@ def test_cleanup_all_tasks(install_mockery, monkeypatch):
def _rmtask(installer, pkg_id):
raise RuntimeError('Raise an exception to test except path')
- spec, installer = create_installer('a')
+ const_arg = installer_args(['a'], {})
+ installer = create_installer(const_arg)
+ spec = installer.build_requests[0].pkg.spec
# Cover task removal happy path
installer.build_tasks['a'] = _mktask(spec.package)
@@ -704,7 +800,9 @@ def test_setup_install_dir_grp(install_mockery, monkeypatch, capfd):
monkeypatch.setattr(prefs, 'get_package_group', _get_group)
monkeypatch.setattr(fs, 'chgrp', _chgrp)
- spec, installer = create_installer('trivial-install-test-package')
+ const_arg = installer_args(['trivial-install-test-package'], {})
+ installer = create_installer(const_arg)
+ spec = installer.build_requests[0].pkg.spec
fs.touchp(spec.prefix)
metadatadir = spack.store.layout.metadata_path(spec)
@@ -725,7 +823,8 @@ def test_cleanup_failed_err(install_mockery, tmpdir, monkeypatch, capsys):
def _raise_except(lock):
raise RuntimeError(msg)
- spec, installer = create_installer('trivial-install-test-package')
+ const_arg = installer_args(['trivial-install-test-package'], {})
+ installer = create_installer(const_arg)
monkeypatch.setattr(lk.Lock, 'release_write', _raise_except)
pkg_id = 'test'
@@ -741,7 +840,9 @@ def test_cleanup_failed_err(install_mockery, tmpdir, monkeypatch, capsys):
def test_update_failed_no_dependent_task(install_mockery):
"""Test _update_failed with missing dependent build tasks."""
- spec, installer = create_installer('dependent-install')
+ const_arg = installer_args(['dependent-install'], {})
+ installer = create_installer(const_arg)
+ spec = installer.build_requests[0].pkg.spec
for dep in spec.traverse(root=False):
task = create_build_task(dep.package)
@@ -751,7 +852,8 @@ def test_update_failed_no_dependent_task(install_mockery):
def test_install_uninstalled_deps(install_mockery, monkeypatch, capsys):
"""Test install with uninstalled dependencies."""
- spec, installer = create_installer('dependent-install')
+ const_arg = installer_args(['dependent-install'], {})
+ installer = create_installer(const_arg)
# Skip the actual installation and any status updates
monkeypatch.setattr(inst.PackageInstaller, '_install_task', _noop)
@@ -759,7 +861,7 @@ def test_install_uninstalled_deps(install_mockery, monkeypatch, capsys):
monkeypatch.setattr(inst.PackageInstaller, '_update_failed', _noop)
msg = 'Cannot proceed with dependent-install'
- with pytest.raises(spack.installer.InstallError, match=msg):
+ with pytest.raises(inst.InstallError, match=msg):
installer.install()
out = str(capsys.readouterr())
@@ -768,30 +870,47 @@ def test_install_uninstalled_deps(install_mockery, monkeypatch, capsys):
def test_install_failed(install_mockery, monkeypatch, capsys):
"""Test install with failed install."""
- spec, installer = create_installer('b')
+ const_arg = installer_args(['b'], {})
+ installer = create_installer(const_arg)
# Make sure the package is identified as failed
monkeypatch.setattr(spack.database.Database, 'prefix_failed', _true)
- # Skip the actual installation though it should never get there
- monkeypatch.setattr(inst.PackageInstaller, '_install_task', _noop)
+ installer.install()
- msg = 'Installation of b failed'
- with pytest.raises(spack.installer.InstallError, match=msg):
- installer.install()
+ out = str(capsys.readouterr())
+ assert installer.build_requests[0].pkg_id in out
+ assert 'failed to install' in out
+
+
+def test_install_failed_not_fast(install_mockery, monkeypatch, capsys):
+    """Test that multiple install failures are reported when fail_fast is off."""
+ const_arg = installer_args(['a'], {'fail_fast': False})
+ installer = create_installer(const_arg)
+
+ # Make sure the package is identified as failed
+ monkeypatch.setattr(spack.database.Database, 'prefix_failed', _true)
+
+ installer.install()
out = str(capsys.readouterr())
- assert 'Warning: b failed to install' in out
+ assert 'failed to install' in out
+ assert 'Skipping build of a' in out
def test_install_fail_on_interrupt(install_mockery, monkeypatch):
"""Test ctrl-c interrupted install."""
- err_msg = 'mock keyboard interrupt'
+ spec_name = 'a'
+ err_msg = 'mock keyboard interrupt for {0}'.format(spec_name)
def _interrupt(installer, task, **kwargs):
- raise KeyboardInterrupt(err_msg)
+ if task.pkg.name == spec_name:
+ raise KeyboardInterrupt(err_msg)
+ else:
+ installer.installed.add(task.pkg.name)
- spec, installer = create_installer('a')
+ const_arg = installer_args([spec_name], {})
+ installer = create_installer(const_arg)
# Raise a KeyboardInterrupt error to trigger early termination
monkeypatch.setattr(inst.PackageInstaller, '_install_task', _interrupt)
@@ -799,41 +918,112 @@ def test_install_fail_on_interrupt(install_mockery, monkeypatch):
with pytest.raises(KeyboardInterrupt, match=err_msg):
installer.install()
+ assert 'b' in installer.installed # ensure dependency of a is 'installed'
+ assert spec_name not in installer.installed
+
+
+def test_install_fail_single(install_mockery, monkeypatch):
+ """Test expected results for failure of single package."""
+ spec_name = 'a'
+ err_msg = 'mock internal package build error for {0}'.format(spec_name)
+
+ class MyBuildException(Exception):
+ pass
+
+ def _install(installer, task, **kwargs):
+ if task.pkg.name == spec_name:
+ raise MyBuildException(err_msg)
+ else:
+ installer.installed.add(task.pkg.name)
+
+ const_arg = installer_args([spec_name], {})
+ installer = create_installer(const_arg)
+
+    # Raise an internal build exception to test single-package failure handling
+ monkeypatch.setattr(inst.PackageInstaller, '_install_task', _install)
+
+ with pytest.raises(MyBuildException, match=err_msg):
+ installer.install()
+
+ assert 'b' in installer.installed # ensure dependency of a is 'installed'
+ assert spec_name not in installer.installed
+
+
+def test_install_fail_multi(install_mockery, monkeypatch):
+ """Test expected results for failure of multiple packages."""
+ spec_name = 'c'
+ err_msg = 'mock internal package build error'
+
+ class MyBuildException(Exception):
+ pass
+
+ def _install(installer, task, **kwargs):
+ if task.pkg.name == spec_name:
+ raise MyBuildException(err_msg)
+ else:
+ installer.installed.add(task.pkg.name)
+
+ const_arg = installer_args([spec_name, 'a'], {})
+ installer = create_installer(const_arg)
+
+    # Raise an internal build exception to test multi-package failure handling
+ monkeypatch.setattr(inst.PackageInstaller, '_install_task', _install)
+
+ with pytest.raises(inst.InstallError, match='Installation request failed'):
+ installer.install()
+
+    assert 'a' in installer.installed  # ensure the second spec installed
+ assert spec_name not in installer.installed
+
def test_install_fail_fast_on_detect(install_mockery, monkeypatch, capsys):
"""Test fail_fast install when an install failure is detected."""
- spec, installer = create_installer('a')
+ const_arg = installer_args(['b'], {'fail_fast': False})
+ const_arg.extend(installer_args(['c'], {'fail_fast': True}))
+ installer = create_installer(const_arg)
+ pkg_ids = [inst.package_id(spec.package) for spec, _ in const_arg]
- # Make sure the package is identified as failed
+ # Make sure all packages are identified as failed
#
# This will prevent b from installing, which will cause the build of a
# to be skipped.
monkeypatch.setattr(spack.database.Database, 'prefix_failed', _true)
- with pytest.raises(spack.installer.InstallError):
- installer.install(fail_fast=True)
+ with pytest.raises(inst.InstallError, match='after first install failure'):
+ installer.install()
+
+ assert pkg_ids[0] in installer.failed, 'Expected b to be marked as failed'
+ assert pkg_ids[1] not in installer.failed, \
+ 'Expected no attempt to install c'
- out = str(capsys.readouterr())
- assert 'Skipping build of a' in out
+ out = capsys.readouterr()[1]
+ assert '{0} failed to install'.format(pkg_ids[0]) in out
-def test_install_fail_fast_on_except(install_mockery, monkeypatch, capsys):
- """Test fail_fast install when an install failure results from an error."""
- err_msg = 'mock patch failure'
+def _test_install_fail_fast_on_except_patch(installer, **kwargs):
+ """Helper for test_install_fail_fast_on_except."""
+ # This is a module-scope function and not a local function because it
+ # needs to be pickleable.
+ raise RuntimeError('mock patch failure')
- def _patch(installer, task, **kwargs):
- raise RuntimeError(err_msg)
- spec, installer = create_installer('a')
+def test_install_fail_fast_on_except(install_mockery, monkeypatch, capsys):
+ """Test fail_fast install when an install failure results from an error."""
+ const_arg = installer_args(['a'], {'fail_fast': True})
+ installer = create_installer(const_arg)
# Raise a non-KeyboardInterrupt exception to trigger fast failure.
#
# This will prevent b from installing, which will cause the build of a
# to be skipped.
- monkeypatch.setattr(spack.package.PackageBase, 'do_patch', _patch)
+ monkeypatch.setattr(
+ spack.package.PackageBase,
+ 'do_patch',
+ _test_install_fail_fast_on_except_patch
+ )
- with pytest.raises(spack.installer.InstallError, matches=err_msg):
- installer.install(fail_fast=True)
+ with pytest.raises(inst.InstallError, match='mock patch failure'):
+ installer.install()
out = str(capsys.readouterr())
assert 'Skipping build of a' in out
@@ -844,7 +1034,8 @@ def test_install_lock_failures(install_mockery, monkeypatch, capfd):
def _requeued(installer, task):
tty.msg('requeued {0}' .format(task.pkg.spec.name))
- spec, installer = create_installer('b')
+ const_arg = installer_args(['b'], {})
+ installer = create_installer(const_arg)
# Ensure never acquire a lock
monkeypatch.setattr(inst.PackageInstaller, '_ensure_locked', _not_locked)
@@ -852,9 +1043,6 @@ def test_install_lock_failures(install_mockery, monkeypatch, capfd):
# Ensure don't continually requeue the task
monkeypatch.setattr(inst.PackageInstaller, '_requeue_task', _requeued)
- # Skip the actual installation though should never reach it
- monkeypatch.setattr(inst.PackageInstaller, '_install_task', _noop)
-
installer.install()
out = capfd.readouterr()[0]
expected = ['write locked', 'read locked', 'requeued']
@@ -864,22 +1052,21 @@ def test_install_lock_failures(install_mockery, monkeypatch, capfd):
def test_install_lock_installed_requeue(install_mockery, monkeypatch, capfd):
"""Cover basic install handling for installed package."""
- def _install(installer, task, **kwargs):
- tty.msg('{0} installing'.format(task.pkg.spec.name))
+ const_arg = installer_args(['b'], {})
+ b, _ = const_arg[0]
+ installer = create_installer(const_arg)
+ b_pkg_id = inst.package_id(b.package)
- def _prep(installer, task, keep_prefix, keep_stage, restage):
- installer.installed.add('b')
- tty.msg('{0} is installed' .format(task.pkg.spec.name))
+ def _prep(installer, task):
+ installer.installed.add(b_pkg_id)
+ tty.msg('{0} is installed' .format(b_pkg_id))
# also do not allow the package to be locked again
monkeypatch.setattr(inst.PackageInstaller, '_ensure_locked',
_not_locked)
def _requeued(installer, task):
- tty.msg('requeued {0}' .format(task.pkg.spec.name))
-
- # Skip the actual installation though should never reach it
- monkeypatch.setattr(inst.PackageInstaller, '_install_task', _install)
+ tty.msg('requeued {0}' .format(inst.package_id(task.pkg)))
# Flag the package as installed
monkeypatch.setattr(inst.PackageInstaller, '_prepare_for_install', _prep)
@@ -887,10 +1074,8 @@ def test_install_lock_installed_requeue(install_mockery, monkeypatch, capfd):
# Ensure don't continually requeue the task
monkeypatch.setattr(inst.PackageInstaller, '_requeue_task', _requeued)
- spec, installer = create_installer('b')
-
installer.install()
- assert 'b' not in installer.installed
+ assert b_pkg_id not in installer.installed
out = capfd.readouterr()[0]
expected = ['is installed', 'read locked', 'requeued']
@@ -902,14 +1087,11 @@ def test_install_read_locked_requeue(install_mockery, monkeypatch, capfd):
"""Cover basic read lock handling for uninstalled package with requeue."""
orig_fn = inst.PackageInstaller._ensure_locked
- def _install(installer, task, **kwargs):
- tty.msg('{0} installing'.format(task.pkg.spec.name))
-
def _read(installer, lock_type, pkg):
tty.msg('{0}->read locked {1}' .format(lock_type, pkg.spec.name))
return orig_fn(installer, 'read', pkg)
- def _prep(installer, task, keep_prefix, keep_stage, restage):
+ def _prep(installer, task):
tty.msg('preparing {0}' .format(task.pkg.spec.name))
assert task.pkg.spec.name not in installer.installed
@@ -919,16 +1101,14 @@ def test_install_read_locked_requeue(install_mockery, monkeypatch, capfd):
# Force a read lock
monkeypatch.setattr(inst.PackageInstaller, '_ensure_locked', _read)
- # Skip the actual installation though should never reach it
- monkeypatch.setattr(inst.PackageInstaller, '_install_task', _install)
-
# Flag the package as installed
monkeypatch.setattr(inst.PackageInstaller, '_prepare_for_install', _prep)
# Ensure don't continually requeue the task
monkeypatch.setattr(inst.PackageInstaller, '_requeue_task', _requeued)
- spec, installer = create_installer('b')
+ const_arg = installer_args(['b'], {})
+ installer = create_installer(const_arg)
installer.install()
assert 'b' not in installer.installed
@@ -939,28 +1119,55 @@ def test_install_read_locked_requeue(install_mockery, monkeypatch, capfd):
assert exp in ln
-def test_install_dir_exists(install_mockery, monkeypatch, capfd):
+def test_install_dir_exists(install_mockery, monkeypatch):
"""Cover capture of install directory exists error."""
- err = 'Mock directory exists error'
+ def _install(installer, task):
+ raise dl.InstallDirectoryAlreadyExistsError(task.pkg.prefix)
+
+ # Ensure raise the desired exception
+ monkeypatch.setattr(inst.PackageInstaller, '_install_task', _install)
+
+ const_arg = installer_args(['b'], {})
+ installer = create_installer(const_arg)
+
+ err = 'already exists'
+ with pytest.raises(dl.InstallDirectoryAlreadyExistsError, match=err):
+ installer.install()
+
+ b, _ = const_arg[0]
+ assert inst.package_id(b.package) in installer.installed
- def _install(installer, task, **kwargs):
- raise dl.InstallDirectoryAlreadyExistsError(err)
+
+def test_install_dir_exists_multi(install_mockery, monkeypatch, capfd):
+ """Cover capture of install directory exists error for multiple specs."""
+ def _install(installer, task):
+ raise dl.InstallDirectoryAlreadyExistsError(task.pkg.prefix)
# Skip the actual installation though should never reach it
monkeypatch.setattr(inst.PackageInstaller, '_install_task', _install)
- spec, installer = create_installer('b')
+ # Use two packages to ensure multiple specs
+ const_arg = installer_args(['b', 'c'], {})
+ installer = create_installer(const_arg)
- with pytest.raises(dl.InstallDirectoryAlreadyExistsError, match=err):
+ with pytest.raises(inst.InstallError, match='Installation request failed'):
installer.install()
- assert 'b' in installer.installed
+ err = capfd.readouterr()[1]
+ assert 'already exists' in err
+ for spec, install_args in const_arg:
+ pkg_id = inst.package_id(spec.package)
+ assert pkg_id in installer.installed
def test_install_skip_patch(install_mockery, mock_fetch):
"""Test the path skip_patch install path."""
- spec, installer = create_installer('b')
+ spec_name = 'b'
+ const_arg = installer_args([spec_name],
+ {'fake': False, 'skip_patch': True})
+ installer = create_installer(const_arg)
- installer.install(fake=False, skip_patch=True)
+ installer.install()
- assert 'b' in installer.installed
+ spec, install_args = const_arg[0]
+ assert inst.package_id(spec.package) in installer.installed
diff --git a/var/spack/repos/builtin.mock/packages/depb/package.py b/var/spack/repos/builtin.mock/packages/depb/package.py
new file mode 100644
index 0000000000..4af71848d3
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/depb/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Depb(AutotoolsPackage):
+ """Simple package with one build dependency"""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/a-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ depends_on('b')
+
+ def install(self, spec, prefix):
+ # sanity_check_prefix requires something in the install directory
+ # Test requires overriding the one provided by `AutotoolsPackage`
+ mkdirp(prefix.bin)