Diffstat (limited to 'lib')
 lib/spack/docs/developer_guide.rst | 2
 lib/spack/docs/packaging_guide.rst | 10
 lib/spack/spack/analyzers/libabigail.py | 2
 lib/spack/spack/build_environment.py | 18
 lib/spack/spack/build_systems/aspell_dict.py | 2
 lib/spack/spack/build_systems/autotools.py | 2
 lib/spack/spack/build_systems/cached_cmake.py | 2
 lib/spack/spack/build_systems/cmake.py | 2
 lib/spack/spack/build_systems/cuda.py | 2
 lib/spack/spack/build_systems/gnu.py | 8
 lib/spack/spack/build_systems/intel.py | 4
 lib/spack/spack/build_systems/lua.py | 2
 lib/spack/spack/build_systems/makefile.py | 2
 lib/spack/spack/build_systems/maven.py | 2
 lib/spack/spack/build_systems/meson.py | 2
 lib/spack/spack/build_systems/octave.py | 2
 lib/spack/spack/build_systems/oneapi.py | 2
 lib/spack/spack/build_systems/perl.py | 2
 lib/spack/spack/build_systems/python.py | 7
 lib/spack/spack/build_systems/qmake.py | 2
 lib/spack/spack/build_systems/r.py | 7
 lib/spack/spack/build_systems/racket.py | 7
 lib/spack/spack/build_systems/rocm.py | 2
 lib/spack/spack/build_systems/ruby.py | 2
 lib/spack/spack/build_systems/scons.py | 2
 lib/spack/spack/build_systems/sip.py | 2
 lib/spack/spack/build_systems/sourceforge.py | 8
 lib/spack/spack/build_systems/sourceware.py | 7
 lib/spack/spack/build_systems/waf.py | 2
 lib/spack/spack/build_systems/xorg.py | 8
 lib/spack/spack/cmd/checksum.py | 2
 lib/spack/spack/cmd/create.py | 2
 lib/spack/spack/cmd/dependencies.py | 4
 lib/spack/spack/cmd/external.py | 2
 lib/spack/spack/cmd/info.py | 6
 lib/spack/spack/cmd/install.py | 2
 lib/spack/spack/cmd/mark.py | 2
 lib/spack/spack/cmd/solve.py | 2
 lib/spack/spack/cmd/style.py | 57
 lib/spack/spack/cmd/test.py | 6
 lib/spack/spack/cmd/uninstall.py | 4
 lib/spack/spack/cmd/url.py | 6
 lib/spack/spack/detection/common.py | 2
 lib/spack/spack/fetch_strategy.py | 2
 lib/spack/spack/installer.py | 55
 lib/spack/spack/package.py | 3151
 lib/spack/spack/package_base.py | 3107
 lib/spack/spack/patch.py | 2
 lib/spack/spack/pkgkit.py | 83
 lib/spack/spack/repo.py | 18
 lib/spack/spack/report.py | 4
 lib/spack/spack/reporters/cdash.py | 2
 lib/spack/spack/reporters/junit.py | 2
 lib/spack/spack/solver/asp.py | 4
 lib/spack/spack/test/build_systems.py | 2
 lib/spack/spack/test/cmd/analyze.py | 2
 lib/spack/spack/test/cmd/clean.py | 4
 lib/spack/spack/test/cmd/install.py | 10
 lib/spack/spack/test/cmd/pkg.py | 2
 lib/spack/spack/test/cmd/stage.py | 10
 lib/spack/spack/test/cmd/style.py | 10
 lib/spack/spack/test/cmd/test.py | 4
 lib/spack/spack/test/conftest.py | 6
 lib/spack/spack/test/database.py | 4
 lib/spack/spack/test/flag_handlers.py | 2
 lib/spack/spack/test/install.py | 8
 lib/spack/spack/test/installer.py | 7
 lib/spack/spack/test/package_class.py | 6
 lib/spack/spack/test/package_sanity.py | 4
 lib/spack/spack/test/packages.py | 6
 lib/spack/spack/test/packaging.py | 12
 lib/spack/spack/test/repo.py | 11
 lib/spack/spack/test/spec_dag.py | 2
 lib/spack/spack/test/test_activations.py | 2
 lib/spack/spack/test/versions.py | 6
 lib/spack/spack/util/mock_package.py | 2
 lib/spack/spack/util/package_hash.py | 4
 lib/spack/spack/util/web.py | 2
 lib/spack/spack/variant.py | 2
 lib/spack/spack/version.py | 1
80 files changed, 3420 insertions(+), 3362 deletions(-)
diff --git a/lib/spack/docs/developer_guide.rst b/lib/spack/docs/developer_guide.rst
index 179ec40267..671e6900f5 100644
--- a/lib/spack/docs/developer_guide.rst
+++ b/lib/spack/docs/developer_guide.rst
@@ -151,7 +151,7 @@ Package-related modules
^^^^^^^^^^^^^^^^^^^^^^^
:mod:`spack.package`
- Contains the :class:`~spack.package.Package` class, which
+ Contains the :class:`~spack.package_base.Package` class, which
is the superclass for all packages in Spack. Methods on ``Package``
implement all phases of the :ref:`package lifecycle
<package-lifecycle>` and manage the build process.
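To make the renamed base class concrete, here is a minimal sketch of a recipe built directly on ``Package`` (the name, URL, and checksum are hypothetical placeholders):

    from spack.package import *

    class Foo(Package):
        """Hypothetical package; Package is the general-purpose superclass."""

        homepage = "https://example.com/foo"
        url = "https://example.com/foo-1.0.tar.gz"

        version("1.0", sha256="0" * 64)  # placeholder checksum

        def install(self, spec, prefix):
            # A plain Package leaves the whole build to install(); helpers
            # such as `make` are injected into the module scope by Spack.
            make()
            make("install", "PREFIX={0}".format(prefix))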
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 4633ef25a8..d0aab3b3a9 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -2393,9 +2393,9 @@ Influence how dependents are built or run
Spack provides a mechanism for dependencies to influence the
environment of their dependents by overriding the
-:meth:`setup_dependent_run_environment <spack.package.PackageBase.setup_dependent_run_environment>`
+:meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
or the
-:meth:`setup_dependent_build_environment <spack.package.PackageBase.setup_dependent_build_environment>`
+:meth:`setup_dependent_build_environment <spack.package_base.PackageBase.setup_dependent_build_environment>`
methods.
The Qt package, for instance, uses this call:
@@ -2417,7 +2417,7 @@ will have the ``PYTHONPATH``, ``PYTHONHOME`` and ``PATH`` environment
variables set appropriately before starting the installation. To make things
even simpler the ``python setup.py`` command is also inserted into the module
scope of dependents by overriding a third method called
-:meth:`setup_dependent_package <spack.package.PackageBase.setup_dependent_package>`
+:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`
:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
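A minimal sketch of a dependency using these hooks (the package and variable names are hypothetical; ``env`` is the ``EnvironmentModifications`` object Spack passes in):

    from spack.package import *

    class Mylib(Package):
        """Hypothetical dependency that influences its dependents."""

        def setup_dependent_build_environment(self, env, dependent_spec):
            # Applied before any dependent of this package is built.
            env.set("MYLIB_ROOT", self.prefix)
            env.prepend_path("CMAKE_PREFIX_PATH", self.prefix)

        def setup_dependent_run_environment(self, env, dependent_spec):
            # Applied when a dependent's run environment is generated.
            env.prepend_path("LD_LIBRARY_PATH", self.prefix.lib)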
@@ -3022,7 +3022,7 @@ The classes that are currently provided by Spack are:
+----------------------------------------------------------+----------------------------------+
| **Base Class** | **Purpose** |
+==========================================================+==================================+
-| :class:`~spack.package.Package` | General base class not |
+| :class:`~spack.package_base.Package` | General base class not |
| | specialized for any build system |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.makefile.MakefilePackage` | Specialized class for packages |
@@ -3153,7 +3153,7 @@ for the install phase is:
For those not used to Python instance methods, this is the
package itself. In this case it's an instance of ``Foo``, which
extends ``Package``. For API docs on Package objects, see
- :py:class:`Package <spack.package.Package>`.
+ :py:class:`Package <spack.package_base.Package>`.
``spec``
This is the concrete spec object created by Spack from an
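A sketch pulling these arguments together (``configure`` and ``make`` are the wrapped executables Spack injects into the package's module scope):

    def install(self, spec, prefix):
        # self:   the Foo instance being installed
        # spec:   the concrete spec Spack resolved for this build
        # prefix: the installation directory assigned to this spec
        configure("--prefix={0}".format(prefix))
        make()
        make("install")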
diff --git a/lib/spack/spack/analyzers/libabigail.py b/lib/spack/spack/analyzers/libabigail.py
index 25c18d7294..92e2faee0e 100644
--- a/lib/spack/spack/analyzers/libabigail.py
+++ b/lib/spack/spack/analyzers/libabigail.py
@@ -12,7 +12,7 @@ import spack.bootstrap
import spack.error
import spack.hooks
import spack.monitor
-import spack.package
+import spack.package_base
import spack.repo
import spack.util.executable
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 6b461cf672..ae961a695b 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -55,7 +55,7 @@ import spack.build_systems.meson
import spack.config
import spack.install_test
import spack.main
-import spack.package
+import spack.package_base
import spack.paths
import spack.platforms
import spack.repo
@@ -722,7 +722,7 @@ def get_std_cmake_args(pkg):
package were a CMakePackage instance.
Args:
- pkg (spack.package.PackageBase): package under consideration
+ pkg (spack.package_base.PackageBase): package under consideration
Returns:
list: arguments for cmake
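A hedged usage sketch: a plain ``Package`` can borrow the standard CMake arguments without inheriting from ``CMakePackage`` (the extra ``-D`` flag is a hypothetical project option):

    from llnl.util.filesystem import working_dir
    from spack.build_environment import get_std_cmake_args

    def install(self, spec, prefix):
        args = get_std_cmake_args(self) + ["-DENABLE_FOO=ON"]
        with working_dir("build", create=True):
            cmake("..", *args)  # `cmake` is injected into package scope
            make()
            make("install")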
@@ -738,7 +738,7 @@ def get_std_meson_args(pkg):
package were a MesonPackage instance.
Args:
- pkg (spack.package.PackageBase): package under consideration
+ pkg (spack.package_base.PackageBase): package under consideration
Returns:
list: arguments for meson
@@ -748,12 +748,12 @@ def get_std_meson_args(pkg):
def parent_class_modules(cls):
"""
- Get list of superclass modules that descend from spack.package.PackageBase
+ Get list of superclass modules that descend from spack.package_base.PackageBase
Includes cls.__module__
"""
- if (not issubclass(cls, spack.package.PackageBase) or
- issubclass(spack.package.PackageBase, cls)):
+ if (not issubclass(cls, spack.package_base.PackageBase) or
+ issubclass(spack.package_base.PackageBase, cls)):
return []
result = []
module = sys.modules.get(cls.__module__)
@@ -771,7 +771,7 @@ def load_external_modules(pkg):
associated with them.
Args:
- pkg (spack.package.PackageBase): package to load deps for
+ pkg (spack.package_base.PackageBase): package to load deps for
"""
for dep in list(pkg.spec.traverse()):
external_modules = dep.external_modules or []
@@ -1109,7 +1109,7 @@ def start_build_process(pkg, function, kwargs):
Args:
- pkg (spack.package.PackageBase): package whose environment we should set up the
+ pkg (spack.package_base.PackageBase): package whose environment we should set up the
child process for.
function (typing.Callable): argless function to run in the child
process.
@@ -1234,7 +1234,7 @@ def get_package_context(traceback, context=3):
if 'self' in frame.f_locals:
# Find the first proper subclass of PackageBase.
obj = frame.f_locals['self']
- if isinstance(obj, spack.package.PackageBase):
+ if isinstance(obj, spack.package_base.PackageBase):
break
# We found obj, the Package implementation we care about.
diff --git a/lib/spack/spack/build_systems/aspell_dict.py b/lib/spack/spack/build_systems/aspell_dict.py
index 7967fb9fa7..5927973906 100644
--- a/lib/spack/spack/build_systems/aspell_dict.py
+++ b/lib/spack/spack/build_systems/aspell_dict.py
@@ -9,7 +9,7 @@ from llnl.util.filesystem import filter_file
from spack.build_systems.autotools import AutotoolsPackage
from spack.directives import extends
-from spack.package import ExtensionError
+from spack.package_base import ExtensionError
from spack.util.executable import which
diff --git a/lib/spack/spack/build_systems/autotools.py b/lib/spack/spack/build_systems/autotools.py
index 08c65bcda2..6909fda1b4 100644
--- a/lib/spack/spack/build_systems/autotools.py
+++ b/lib/spack/spack/build_systems/autotools.py
@@ -16,7 +16,7 @@ from llnl.util.filesystem import force_remove, working_dir
from spack.build_environment import InstallError
from spack.directives import conflicts, depends_on
from spack.operating_systems.mac_os import macos_version
-from spack.package import PackageBase, run_after, run_before
+from spack.package_base import PackageBase, run_after, run_before
from spack.util.executable import Executable
from spack.version import Version
diff --git a/lib/spack/spack/build_systems/cached_cmake.py b/lib/spack/spack/build_systems/cached_cmake.py
index e5e96186eb..47b95f6f89 100644
--- a/lib/spack/spack/build_systems/cached_cmake.py
+++ b/lib/spack/spack/build_systems/cached_cmake.py
@@ -8,7 +8,7 @@ import llnl.util.tty as tty
from llnl.util.filesystem import install, mkdirp
from spack.build_systems.cmake import CMakePackage
-from spack.package import run_after
+from spack.package_base import run_after
def cmake_cache_path(name, value, comment=""):
diff --git a/lib/spack/spack/build_systems/cmake.py b/lib/spack/spack/build_systems/cmake.py
index 105e9d5ab8..265341344a 100644
--- a/lib/spack/spack/build_systems/cmake.py
+++ b/lib/spack/spack/build_systems/cmake.py
@@ -18,7 +18,7 @@ from llnl.util.filesystem import working_dir
import spack.build_environment
from spack.directives import conflicts, depends_on, variant
-from spack.package import InstallError, PackageBase, run_after
+from spack.package_base import InstallError, PackageBase, run_after
from spack.util.path import convert_to_posix_path
# Regex to extract the primary generator from the CMake generator
diff --git a/lib/spack/spack/build_systems/cuda.py b/lib/spack/spack/build_systems/cuda.py
index e8810a471f..312f2d8f46 100644
--- a/lib/spack/spack/build_systems/cuda.py
+++ b/lib/spack/spack/build_systems/cuda.py
@@ -6,7 +6,7 @@
import spack.variant
from spack.directives import conflicts, depends_on, variant
from spack.multimethod import when
-from spack.package import PackageBase
+from spack.package_base import PackageBase
class CudaPackage(PackageBase):
diff --git a/lib/spack/spack/build_systems/gnu.py b/lib/spack/spack/build_systems/gnu.py
index 6ac38065e5..2bca018872 100644
--- a/lib/spack/spack/build_systems/gnu.py
+++ b/lib/spack/spack/build_systems/gnu.py
@@ -3,14 +3,16 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import spack.package
+from typing import Optional
+
+import spack.package_base
import spack.util.url
-class GNUMirrorPackage(spack.package.PackageBase):
+class GNUMirrorPackage(spack.package_base.PackageBase):
"""Mixin that takes care of setting url and mirrors for GNU packages."""
#: Path of the package in a GNU mirror
- gnu_mirror_path = None
+ gnu_mirror_path = None # type: Optional[str]
#: List of GNU mirrors used by Spack
base_mirrors = [
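The new ``# type: Optional[str]`` annotation is a PEP 484 comment-style hint: it keeps the file importable under Python 2 while letting mypy check the attribute. In use, the mixin looks roughly like this (modeled on the real GNU hello recipe; treat the details as illustrative):

    from spack.package import *

    class Hello(AutotoolsPackage, GNUMirrorPackage):
        """Hypothetical GNU package; the mixin derives url and mirror URLs."""

        # Relative path under any GNU mirror, instead of a hard-coded url
        gnu_mirror_path = "hello/hello-2.10.tar.gz"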
diff --git a/lib/spack/spack/build_systems/intel.py b/lib/spack/spack/build_systems/intel.py
index ad60456fb2..aeb3f861c1 100644
--- a/lib/spack/spack/build_systems/intel.py
+++ b/lib/spack/spack/build_systems/intel.py
@@ -26,7 +26,7 @@ from llnl.util.filesystem import (
import spack.error
from spack.build_environment import dso_suffix
-from spack.package import InstallError, PackageBase, run_after
+from spack.package_base import InstallError, PackageBase, run_after
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
from spack.util.prefix import Prefix
@@ -1115,7 +1115,7 @@ class IntelPackage(PackageBase):
raise InstallError('compilers_of_client arg required for MPI')
def setup_dependent_package(self, module, dep_spec):
- # https://spack.readthedocs.io/en/latest/spack.html#spack.package.PackageBase.setup_dependent_package
+ # https://spack.readthedocs.io/en/latest/spack.html#spack.package_base.PackageBase.setup_dependent_package
# Reminder: "module" refers to Python module.
# Called before the install() method of dependents.
diff --git a/lib/spack/spack/build_systems/lua.py b/lib/spack/spack/build_systems/lua.py
index 3a836de012..ed71aac9e9 100644
--- a/lib/spack/spack/build_systems/lua.py
+++ b/lib/spack/spack/build_systems/lua.py
@@ -10,7 +10,7 @@ from llnl.util.filesystem import find
from spack.directives import depends_on, extends
from spack.multimethod import when
-from spack.package import PackageBase
+from spack.package_base import PackageBase
from spack.util.executable import Executable
diff --git a/lib/spack/spack/build_systems/makefile.py b/lib/spack/spack/build_systems/makefile.py
index 75574a6130..be5986da33 100644
--- a/lib/spack/spack/build_systems/makefile.py
+++ b/lib/spack/spack/build_systems/makefile.py
@@ -11,7 +11,7 @@ import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
from spack.directives import conflicts
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after
class MakefilePackage(PackageBase):
diff --git a/lib/spack/spack/build_systems/maven.py b/lib/spack/spack/build_systems/maven.py
index bd0b36655f..ffa67346ab 100644
--- a/lib/spack/spack/build_systems/maven.py
+++ b/lib/spack/spack/build_systems/maven.py
@@ -7,7 +7,7 @@
from llnl.util.filesystem import install_tree, working_dir
from spack.directives import depends_on
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after
from spack.util.executable import which
diff --git a/lib/spack/spack/build_systems/meson.py b/lib/spack/spack/build_systems/meson.py
index 1baeca171a..9e3bbf20c8 100644
--- a/lib/spack/spack/build_systems/meson.py
+++ b/lib/spack/spack/build_systems/meson.py
@@ -11,7 +11,7 @@ from typing import List # novm
from llnl.util.filesystem import working_dir
from spack.directives import depends_on, variant
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after
class MesonPackage(PackageBase):
diff --git a/lib/spack/spack/build_systems/octave.py b/lib/spack/spack/build_systems/octave.py
index a7c6dc570b..13dd864e3b 100644
--- a/lib/spack/spack/build_systems/octave.py
+++ b/lib/spack/spack/build_systems/octave.py
@@ -6,7 +6,7 @@
import inspect
from spack.directives import extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after
class OctavePackage(PackageBase):
diff --git a/lib/spack/spack/build_systems/oneapi.py b/lib/spack/spack/build_systems/oneapi.py
index d8120b5293..76c69acd5e 100644
--- a/lib/spack/spack/build_systems/oneapi.py
+++ b/lib/spack/spack/build_systems/oneapi.py
@@ -14,7 +14,7 @@ from os.path import basename, dirname, isdir
from llnl.util.filesystem import find_headers, find_libraries, join_path
-from spack.package import Package
+from spack.package_base import Package
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
diff --git a/lib/spack/spack/build_systems/perl.py b/lib/spack/spack/build_systems/perl.py
index ab5f7a4d46..60530abd05 100644
--- a/lib/spack/spack/build_systems/perl.py
+++ b/lib/spack/spack/build_systems/perl.py
@@ -10,7 +10,7 @@ import os
from llnl.util.filesystem import filter_file
from spack.directives import extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after
from spack.util.executable import Executable
diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py
index fd836afe92..d72b9c5a8d 100644
--- a/lib/spack/spack/build_systems/python.py
+++ b/lib/spack/spack/build_systems/python.py
@@ -6,6 +6,7 @@ import inspect
import os
import re
import shutil
+from typing import Optional
import llnl.util.tty as tty
from llnl.util.filesystem import (
@@ -19,13 +20,13 @@ from llnl.util.filesystem import (
from llnl.util.lang import match_predicate
from spack.directives import depends_on, extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after
class PythonPackage(PackageBase):
"""Specialized class for packages that are built using pip."""
#: Package name, version, and extension on PyPI
- pypi = None
+ pypi = None # type: Optional[str]
maintainers = ['adamjstewart']
@@ -46,7 +47,7 @@ class PythonPackage(PackageBase):
# package manually
depends_on('py-wheel', type='build')
- py_namespace = None
+ py_namespace = None # type: Optional[str]
@staticmethod
def _std_args(cls):
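A sketch of the ``pypi`` attribute in use (names are hypothetical; the attribute expands to the package's homepage and download url):

    from spack.package import *

    class PyExample(PythonPackage):
        """Hypothetical PyPI package."""

        # Package name, version, and extension on PyPI, as documented above
        pypi = "example/example-1.0.tar.gz"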
diff --git a/lib/spack/spack/build_systems/qmake.py b/lib/spack/spack/build_systems/qmake.py
index 3272602f12..b2330ce963 100644
--- a/lib/spack/spack/build_systems/qmake.py
+++ b/lib/spack/spack/build_systems/qmake.py
@@ -9,7 +9,7 @@ import inspect
from llnl.util.filesystem import working_dir
from spack.directives import depends_on
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after
class QMakePackage(PackageBase):
diff --git a/lib/spack/spack/build_systems/r.py b/lib/spack/spack/build_systems/r.py
index 0c0cb48bbc..b1df2f594d 100644
--- a/lib/spack/spack/build_systems/r.py
+++ b/lib/spack/spack/build_systems/r.py
@@ -5,9 +5,10 @@
import inspect
+from typing import Optional
from spack.directives import extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after
class RPackage(PackageBase):
@@ -28,10 +29,10 @@ class RPackage(PackageBase):
# package attributes that can be expanded to set the homepage, url,
# list_url, and git values
# For CRAN packages
- cran = None
+ cran = None # type: Optional[str]
# For Bioconductor packages
- bioc = None
+ bioc = None # type: Optional[str]
maintainers = ['glennpj']
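Analogously for R, a sketch of the ``cran`` attribute in use (modeled on a typical CRAN recipe; treat it as illustrative):

    from spack.package import *

    class RJsonlite(RPackage):
        """Hypothetical CRAN package; `cran` expands homepage, url, and list_url."""

        cran = "jsonlite"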
diff --git a/lib/spack/spack/build_systems/racket.py b/lib/spack/spack/build_systems/racket.py
index 5bb4026cf4..1c9e8e84d8 100644
--- a/lib/spack/spack/build_systems/racket.py
+++ b/lib/spack/spack/build_systems/racket.py
@@ -3,13 +3,14 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
+from typing import Optional
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
from spack.directives import extends
-from spack.package import PackageBase
+from spack.package_base import PackageBase
from spack.util.environment import env_flag
from spack.util.executable import Executable, ProcessError
@@ -36,8 +37,8 @@ class RacketPackage(PackageBase):
extends('racket')
pkgs = False
- subdirectory = None
- name = None
+ subdirectory = None # type: Optional[str]
+ name = None # type: Optional[str]
parallel = True
@property
diff --git a/lib/spack/spack/build_systems/rocm.py b/lib/spack/spack/build_systems/rocm.py
index 65c51ff749..9a31c579fb 100644
--- a/lib/spack/spack/build_systems/rocm.py
+++ b/lib/spack/spack/build_systems/rocm.py
@@ -77,7 +77,7 @@
import spack.variant
from spack.directives import conflicts, depends_on, variant
-from spack.package import PackageBase
+from spack.package_base import PackageBase
class ROCmPackage(PackageBase):
diff --git a/lib/spack/spack/build_systems/ruby.py b/lib/spack/spack/build_systems/ruby.py
index 1fa14e923a..cec1116477 100644
--- a/lib/spack/spack/build_systems/ruby.py
+++ b/lib/spack/spack/build_systems/ruby.py
@@ -7,7 +7,7 @@ import glob
import inspect
from spack.directives import extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after
class RubyPackage(PackageBase):
diff --git a/lib/spack/spack/build_systems/scons.py b/lib/spack/spack/build_systems/scons.py
index 2c189840c5..55ae072085 100644
--- a/lib/spack/spack/build_systems/scons.py
+++ b/lib/spack/spack/build_systems/scons.py
@@ -7,7 +7,7 @@
import inspect
from spack.directives import depends_on
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after
class SConsPackage(PackageBase):
diff --git a/lib/spack/spack/build_systems/sip.py b/lib/spack/spack/build_systems/sip.py
index b4be6ed5f2..bb039b2600 100644
--- a/lib/spack/spack/build_systems/sip.py
+++ b/lib/spack/spack/build_systems/sip.py
@@ -11,7 +11,7 @@ import llnl.util.tty as tty
from llnl.util.filesystem import find, join_path, working_dir
from spack.directives import depends_on, extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after
class SIPPackage(PackageBase):
diff --git a/lib/spack/spack/build_systems/sourceforge.py b/lib/spack/spack/build_systems/sourceforge.py
index ea0b408707..38032f6b1b 100644
--- a/lib/spack/spack/build_systems/sourceforge.py
+++ b/lib/spack/spack/build_systems/sourceforge.py
@@ -3,15 +3,17 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import spack.package
+from typing import Optional
+
+import spack.package_base
import spack.util.url
-class SourceforgePackage(spack.package.PackageBase):
+class SourceforgePackage(spack.package_base.PackageBase):
"""Mixin that takes care of setting url and mirrors for Sourceforge
packages."""
#: Path of the package in a Sourceforge mirror
- sourceforge_mirror_path = None
+ sourceforge_mirror_path = None # type: Optional[str]
#: List of Sourceforge mirrors used by Spack
base_mirrors = [
diff --git a/lib/spack/spack/build_systems/sourceware.py b/lib/spack/spack/build_systems/sourceware.py
index bdf42e4947..a26ea345ee 100644
--- a/lib/spack/spack/build_systems/sourceware.py
+++ b/lib/spack/spack/build_systems/sourceware.py
@@ -2,16 +2,17 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from typing import Optional
-import spack.package
+import spack.package_base
import spack.util.url
-class SourcewarePackage(spack.package.PackageBase):
+class SourcewarePackage(spack.package_base.PackageBase):
"""Mixin that takes care of setting url and mirrors for Sourceware.org
packages."""
#: Path of the package in a Sourceware mirror
- sourceware_mirror_path = None
+ sourceware_mirror_path = None # type: Optional[str]
#: List of Sourceware mirrors used by Spack
base_mirrors = [
diff --git a/lib/spack/spack/build_systems/waf.py b/lib/spack/spack/build_systems/waf.py
index f40ec9f258..8a65f48ec4 100644
--- a/lib/spack/spack/build_systems/waf.py
+++ b/lib/spack/spack/build_systems/waf.py
@@ -9,7 +9,7 @@ import inspect
from llnl.util.filesystem import working_dir
from spack.directives import depends_on
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after
class WafPackage(PackageBase):
diff --git a/lib/spack/spack/build_systems/xorg.py b/lib/spack/spack/build_systems/xorg.py
index 0d0f80a7c5..246e830254 100644
--- a/lib/spack/spack/build_systems/xorg.py
+++ b/lib/spack/spack/build_systems/xorg.py
@@ -3,15 +3,17 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import spack.package
+from typing import Optional
+
+import spack.package_base
import spack.util.url
-class XorgPackage(spack.package.PackageBase):
+class XorgPackage(spack.package_base.PackageBase):
"""Mixin that takes care of setting url and mirrors for x.org
packages."""
#: Path of the package in an x.org mirror
- xorg_mirror_path = None
+ xorg_mirror_path = None # type: Optional[str]
#: List of x.org mirrors used by Spack
# Note: x.org mirrors are a bit tricky, since many are out-of-sync or off.
diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py
index db431c5ed9..f88417dd2f 100644
--- a/lib/spack/spack/cmd/checksum.py
+++ b/lib/spack/spack/cmd/checksum.py
@@ -14,7 +14,7 @@ import spack.cmd.common.arguments as arguments
import spack.repo
import spack.stage
import spack.util.crypto
-from spack.package import preferred_version
+from spack.package_base import preferred_version
from spack.util.naming import valid_fully_qualified_module_name
from spack.version import Version, ver
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index 94ddc7d292..1fcbb26f5d 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -57,7 +57,7 @@ package_template = '''\
# See the Spack documentation for more information on packaging.
# ----------------------------------------------------------------------------
-from spack import *
+from spack.package import *
class {class_name}({base_class_name}):
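With this change, a freshly generated recipe begins roughly as follows (the class and base names are placeholders chosen by ``spack create``):

    from spack.package import *

    class MyNewPackage(AutotoolsPackage):
        """FIXME: Put a proper description of your package here."""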
diff --git a/lib/spack/spack/cmd/dependencies.py b/lib/spack/spack/cmd/dependencies.py
index 23aa12d9c5..b273c74297 100644
--- a/lib/spack/spack/cmd/dependencies.py
+++ b/lib/spack/spack/cmd/dependencies.py
@@ -11,7 +11,7 @@ from llnl.util.tty.colify import colify
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
-import spack.package
+import spack.package_base
import spack.repo
import spack.store
@@ -57,7 +57,7 @@ def dependencies(parser, args):
else:
spec = specs[0]
- dependencies = spack.package.possible_dependencies(
+ dependencies = spack.package_base.possible_dependencies(
spec,
transitive=args.transitive,
expand_virtuals=args.expand_virtuals,
diff --git a/lib/spack/spack/cmd/external.py b/lib/spack/spack/cmd/external.py
index 8b9c845c3e..a307b5e1ef 100644
--- a/lib/spack/spack/cmd/external.py
+++ b/lib/spack/spack/cmd/external.py
@@ -200,7 +200,7 @@ def external_list(args):
list(spack.repo.path.all_packages())
# Print all the detectable packages
tty.msg("Detectable packages per repository")
- for namespace, pkgs in sorted(spack.package.detectable_packages.items()):
+ for namespace, pkgs in sorted(spack.package_base.detectable_packages.items()):
print("Repository:", namespace)
colify.colify(pkgs, indent=4, output=sys.stdout)
diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py
index 7a086dc416..31ea53f384 100644
--- a/lib/spack/spack/cmd/info.py
+++ b/lib/spack/spack/cmd/info.py
@@ -18,7 +18,7 @@ import spack.cmd.common.arguments as arguments
import spack.fetch_strategy as fs
import spack.repo
import spack.spec
-from spack.package import has_test_method, preferred_version
+from spack.package_base import has_test_method, preferred_version
description = 'get detailed information on a particular package'
section = 'basic'
@@ -269,14 +269,14 @@ def print_tests(pkg):
names = []
pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
if has_test_method(pkg_cls):
- pkg_base = spack.package.PackageBase
+ pkg_base = spack.package_base.PackageBase
test_pkgs = [str(cls.test) for cls in inspect.getmro(pkg_cls) if
issubclass(cls, pkg_base) and cls.test != pkg_base.test]
test_pkgs = list(set(test_pkgs))
names.extend([(test.split()[1]).lower() for test in test_pkgs])
# TODO Refactor START
- # Use code from package.py's test_process IF this functionality is
+ # Use code from package_base.py's test_process IF this functionality is
# accepted.
v_names = list(set([vspec.name for vspec in pkg.virtuals_provided]))
diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py
index bab245f736..2781deb60c 100644
--- a/lib/spack/spack/cmd/install.py
+++ b/lib/spack/spack/cmd/install.py
@@ -302,7 +302,7 @@ environment variables:
)
reporter = spack.report.collect_info(
- spack.package.PackageInstaller, '_install_task', args.log_format, args)
+ spack.package_base.PackageInstaller, '_install_task', args.log_format, args)
if args.log_file:
reporter.filename = args.log_file
diff --git a/lib/spack/spack/cmd/mark.py b/lib/spack/spack/cmd/mark.py
index ae25ccbe88..da31917c39 100644
--- a/lib/spack/spack/cmd/mark.py
+++ b/lib/spack/spack/cmd/mark.py
@@ -12,7 +12,7 @@ from llnl.util import tty
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.error
-import spack.package
+import spack.package_base
import spack.repo
import spack.store
from spack.database import InstallStatuses
diff --git a/lib/spack/spack/cmd/solve.py b/lib/spack/spack/cmd/solve.py
index ba5cc218cb..29f96ba485 100644
--- a/lib/spack/spack/cmd/solve.py
+++ b/lib/spack/spack/cmd/solve.py
@@ -18,7 +18,7 @@ import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment
import spack.hash_types as ht
-import spack.package
+import spack.package_base
import spack.solver.asp as asp
description = "concretize a specs using an ASP solver"
diff --git a/lib/spack/spack/cmd/style.py b/lib/spack/spack/cmd/style.py
index 658e7e98cd..5c31228391 100644
--- a/lib/spack/spack/cmd/style.py
+++ b/lib/spack/spack/cmd/style.py
@@ -65,7 +65,7 @@ def is_package(f):
packages, since we allow `from spack import *` and poking globals
into packages.
"""
- return f.startswith("var/spack/repos/")
+ return f.startswith("var/spack/repos/") and f.endswith('package.py')
#: decorator for adding tools to the list
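The narrowed predicate now matches only recipe files rather than everything under a repo; illustrative checks (paths are hypothetical):

    is_package("var/spack/repos/builtin/packages/zlib/package.py")  # True
    is_package("var/spack/repos/builtin/packages/zlib/fix.patch")   # False: not package.py
    is_package("lib/spack/spack/spec.py")                           # False: not under a repo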
@@ -236,7 +236,7 @@ def rewrite_and_print_output(
continue
if not args.root_relative and re_obj:
line = re_obj.sub(translate, line)
- print(" " + line)
+ print(line)
def print_style_header(file_list, args):
@@ -290,20 +290,26 @@ def run_flake8(flake8_cmd, file_list, args):
@tool("mypy")
def run_mypy(mypy_cmd, file_list, args):
# always run with config from running spack prefix
- mypy_args = [
+ common_mypy_args = [
"--config-file", os.path.join(spack.paths.prefix, "pyproject.toml"),
- "--package", "spack",
- "--package", "llnl",
"--show-error-codes",
]
- # not yet, need other updates to enable this
- # if any([is_package(f) for f in file_list]):
- # mypy_args.extend(["--package", "packages"])
+ mypy_arg_sets = [common_mypy_args + [
+ "--package", "spack",
+ "--package", "llnl",
+ ]]
+ if 'SPACK_MYPY_CHECK_PACKAGES' in os.environ:
+ mypy_arg_sets.append(common_mypy_args + [
+ '--package', 'packages',
+ '--disable-error-code', 'no-redef',
+ ])
- output = mypy_cmd(*mypy_args, fail_on_error=False, output=str)
- returncode = mypy_cmd.returncode
+ returncode = 0
+ for mypy_args in mypy_arg_sets:
+ output = mypy_cmd(*mypy_args, fail_on_error=False, output=str)
+ returncode |= mypy_cmd.returncode
- rewrite_and_print_output(output, args)
+ rewrite_and_print_output(output, args)
print_tool_result("mypy", returncode)
return returncode
@@ -318,16 +324,29 @@ def run_isort(isort_cmd, file_list, args):
pat = re.compile("ERROR: (.*) Imports are incorrectly sorted")
replacement = "ERROR: {0} Imports are incorrectly sorted"
- returncode = 0
- for chunk in grouper(file_list, 100):
- packed_args = isort_args + tuple(chunk)
- output = isort_cmd(*packed_args, fail_on_error=False, output=str, error=str)
- returncode |= isort_cmd.returncode
+ returncode = [0]
- rewrite_and_print_output(output, args, pat, replacement)
+ def process_files(file_list, is_args):
+ for chunk in grouper(file_list, 100):
+ packed_args = is_args + tuple(chunk)
+ output = isort_cmd(*packed_args, fail_on_error=False, output=str, error=str)
+ returncode[0] |= isort_cmd.returncode
- print_tool_result("isort", returncode)
- return returncode
+ rewrite_and_print_output(output, args, pat, replacement)
+
+ packages_isort_args = ('--rm', 'spack', '--rm', 'spack.pkgkit', '--rm',
+ 'spack.package_defs', '-a', 'from spack.package import *')
+ packages_isort_args = packages_isort_args + isort_args
+
+ # packages
+ process_files(filter(is_package, file_list),
+ packages_isort_args)
+ # non-packages
+ process_files(filter(lambda f: not is_package(f), file_list),
+ isort_args)
+
+ print_tool_result("isort", returncode[0])
+ return returncode[0]
@tool("black")
diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py
index 358fcb8846..51d23da429 100644
--- a/lib/spack/spack/cmd/test.py
+++ b/lib/spack/spack/cmd/test.py
@@ -20,7 +20,7 @@ import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.install_test
-import spack.package
+import spack.package_base
import spack.repo
import spack.report
@@ -189,7 +189,7 @@ environment variables:
# Set up reporter
setattr(args, 'package', [s.format() for s in test_suite.specs])
reporter = spack.report.collect_info(
- spack.package.PackageBase, 'do_test', args.log_format, args)
+ spack.package_base.PackageBase, 'do_test', args.log_format, args)
if not reporter.filename:
if args.log_file:
if os.path.isabs(args.log_file):
@@ -217,7 +217,7 @@ def test_list(args):
else set()
def has_test_and_tags(pkg_class):
- return spack.package.has_test_method(pkg_class) and \
+ return spack.package_base.has_test_method(pkg_class) and \
(not args.tag or pkg_class.name in tagged)
if args.list_all:
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index 37fe98d0d0..643be0044b 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -15,7 +15,7 @@ import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.error
-import spack.package
+import spack.package_base
import spack.repo
import spack.store
from spack.database import InstallStatuses
@@ -221,7 +221,7 @@ def do_uninstall(env, specs, force):
except spack.repo.UnknownEntityError:
# The package.py file has gone away -- but still
# want to uninstall.
- spack.package.Package.uninstall_by_spec(item, force=True)
+ spack.package_base.Package.uninstall_by_spec(item, force=True)
# A package is ready to be uninstalled when nothing else references it,
# unless we are requested to force uninstall it.
diff --git a/lib/spack/spack/cmd/url.py b/lib/spack/spack/cmd/url.py
index d7710d684a..97fa300a3d 100644
--- a/lib/spack/spack/cmd/url.py
+++ b/lib/spack/spack/cmd/url.py
@@ -422,7 +422,7 @@ def url_list_parsing(args, urls, url, pkg):
urls (set): List of URLs that have already been added
url (str or None): A URL to potentially add to ``urls`` depending on
``args``
- pkg (spack.package.PackageBase): The Spack package
+ pkg (spack.package_base.PackageBase): The Spack package
Returns:
set: The updated set of ``urls``
@@ -470,7 +470,7 @@ def name_parsed_correctly(pkg, name):
"""Determine if the name of a package was correctly parsed.
Args:
- pkg (spack.package.PackageBase): The Spack package
+ pkg (spack.package_base.PackageBase): The Spack package
name (str): The name that was extracted from the URL
Returns:
@@ -487,7 +487,7 @@ def version_parsed_correctly(pkg, version):
"""Determine if the version of a package was correctly parsed.
Args:
- pkg (spack.package.PackageBase): The Spack package
+ pkg (spack.package_base.PackageBase): The Spack package
version (str): The version that was extracted from the URL
Returns:
diff --git a/lib/spack/spack/detection/common.py b/lib/spack/spack/detection/common.py
index a8f0e82a87..813f235868 100644
--- a/lib/spack/spack/detection/common.py
+++ b/lib/spack/spack/detection/common.py
@@ -240,7 +240,7 @@ def compute_windows_program_path_for_package(pkg):
program files location, return list of best guesses
Args:
- pkg (spack.package.Package): package for which
+ pkg (spack.package_base.Package): package for which
Program Files location is to be computed
"""
if not is_windows:
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index 378209308f..d1da1e6974 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -1556,7 +1556,7 @@ def _extrapolate(pkg, version):
try:
return URLFetchStrategy(pkg.url_for_version(version),
fetch_options=pkg.fetch_options)
- except spack.package.NoURLError:
+ except spack.package_base.NoURLError:
msg = ("Can't extrapolate a URL for version %s "
"because package %s defines no URLs")
raise ExtrapolationError(msg % (version, pkg.name))
diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py
index 2e430b8a07..45ee08104b 100644
--- a/lib/spack/spack/installer.py
+++ b/lib/spack/spack/installer.py
@@ -50,7 +50,7 @@ import spack.compilers
import spack.error
import spack.hooks
import spack.monitor
-import spack.package
+import spack.package_base
import spack.package_prefs as prefs
import spack.repo
import spack.store
@@ -103,7 +103,7 @@ def _check_last_phase(pkg):
package already.
Args:
- pkg (spack.package.PackageBase): the package being installed
+ pkg (spack.package_base.PackageBase): the package being installed
Raises:
``BadInstallPhase`` if stop_before or last phase is invalid
@@ -125,7 +125,7 @@ def _handle_external_and_upstream(pkg, explicit):
database if it is external package.
Args:
- pkg (spack.package.Package): the package whose installation is under
+ pkg (spack.package_base.Package): the package whose installation is under
consideration
explicit (bool): the package was explicitly requested by the user
Return:
@@ -265,7 +265,7 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
Extract the package from binary cache
Args:
- pkg (spack.package.PackageBase): the package to install from the binary cache
+ pkg (spack.package_base.PackageBase): package to install from the binary cache
cache_only (bool): only extract from binary cache
explicit (bool): ``True`` if installing the package was explicitly
requested by the user, otherwise, ``False``
@@ -355,7 +355,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
Process the binary cache tarball.
Args:
- pkg (spack.package.PackageBase): the package being installed
+ pkg (spack.package_base.PackageBase): the package being installed
binary_spec (spack.spec.Spec): the spec whose cache has been confirmed
explicit (bool): the package was explicitly requested by the user
unsigned (bool): ``True`` if binary package signatures to be checked,
@@ -394,7 +394,7 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
Try to extract the package from binary cache.
Args:
- pkg (spack.package.PackageBase): the package to be extracted from binary cache
+ pkg (spack.package_base.PackageBase): package to be extracted from binary cache
explicit (bool): the package was explicitly requested by the user
unsigned (bool): ``True`` if binary package signatures to be checked,
otherwise, ``False``
@@ -530,7 +530,7 @@ def log(pkg):
Copy provenance into the install directory on success
Args:
- pkg (spack.package.Package): the package that was built and installed
+ pkg (spack.package_base.Package): the package that was built and installed
"""
packages_dir = spack.store.layout.build_packages_path(pkg.spec)
@@ -616,7 +616,7 @@ def package_id(pkg):
and packages for combinatorial environments.
Args:
- pkg (spack.package.PackageBase): the package from which the identifier is
+ pkg (spack.package_base.PackageBase): the package from which the identifier is
derived
"""
if not pkg.spec.concrete:
@@ -769,7 +769,7 @@ class PackageInstaller(object):
Args:
compiler: the compiler to bootstrap
architecture: the architecture for which to bootstrap the compiler
- pkgs (spack.package.PackageBase): the package with possible compiler
+ pkgs (spack.package_base.PackageBase): the package with possible compiler
dependencies
request (BuildRequest): the associated install request
all_deps (defaultdict(set)): dictionary of all dependencies and
@@ -786,7 +786,7 @@ class PackageInstaller(object):
Creates and queues the initial build task for the package.
Args:
- pkg (spack.package.Package): the package to be built and installed
+ pkg (spack.package_base.Package): the package to be built and installed
request (BuildRequest or None): the associated install request
where ``None`` can be used to indicate the package was
explicitly requested by the user
@@ -968,7 +968,7 @@ class PackageInstaller(object):
Cleanup the build task for the spec
Args:
- pkg (spack.package.PackageBase): the package being installed
+ pkg (spack.package_base.PackageBase): the package being installed
"""
self._remove_task(package_id(pkg))
@@ -982,7 +982,7 @@ class PackageInstaller(object):
already locked.
Args:
- pkg (spack.package.PackageBase): the package being locally installed
+ pkg (spack.package_base.PackageBase): the package being locally installed
"""
pkg_id = package_id(pkg)
pre = "{0} cannot be installed locally:".format(pkg_id)
@@ -1014,7 +1014,8 @@ class PackageInstaller(object):
Args:
lock_type (str): 'read' for a read lock, 'write' for a write lock
- pkg (spack.package.PackageBase): the package whose spec is being installed
+ pkg (spack.package_base.PackageBase): the package whose spec is being
+ installed
Return:
(lock_type, lock) tuple where lock will be None if it could not
@@ -1228,7 +1229,7 @@ class PackageInstaller(object):
# Create a child process to do the actual installation.
# Preserve verbosity settings across installs.
- spack.package.PackageBase._verbose = (
+ spack.package_base.PackageBase._verbose = (
spack.build_environment.start_build_process(
pkg, build_process, install_args)
)
@@ -1373,7 +1374,7 @@ class PackageInstaller(object):
Write a small metadata file with the current spack environment.
Args:
- pkg (spack.package.Package): the package to be built and installed
+ pkg (spack.package_base.Package): the package to be built and installed
"""
if not os.path.exists(pkg.spec.prefix):
tty.debug('Creating the installation directory {0}'.format(pkg.spec.prefix))
@@ -1447,7 +1448,7 @@ class PackageInstaller(object):
known dependents.
Args:
- pkg (spack.package.Package): Package that has been installed locally,
+ pkg (spack.package_base.Package): Package that has been installed locally,
externally or upstream
dependent_ids (list or None): list of the package's
dependent ids, or None if the dependent ids are limited to
@@ -1536,7 +1537,7 @@ class PackageInstaller(object):
Install the requested package(s) and/or associated dependencies.
Args:
- pkg (spack.package.Package): the package to be built and installed"""
+ pkg (spack.package_base.Package): the package to be built and installed"""
self._init_queue()
fail_fast_err = 'Terminating after first install failure'
@@ -1788,7 +1789,7 @@ class BuildProcessInstaller(object):
process in the build.
Arguments:
- pkg (spack.package.PackageBase) the package being installed.
+ pkg (spack.package_base.PackageBase) the package being installed.
install_args (dict) arguments to do_install() from parent process.
"""
@@ -1848,8 +1849,8 @@ class BuildProcessInstaller(object):
# get verbosity from do_install() parameter or saved value
self.echo = self.verbose
- if spack.package.PackageBase._verbose is not None:
- self.echo = spack.package.PackageBase._verbose
+ if spack.package_base.PackageBase._verbose is not None:
+ self.echo = spack.package_base.PackageBase._verbose
self.pkg.stage.keep = self.keep_stage
@@ -2001,7 +2002,7 @@ def build_process(pkg, install_args):
This function's return value is returned to the parent process.
Arguments:
- pkg (spack.package.PackageBase): the package being installed.
+ pkg (spack.package_base.PackageBase): the package being installed.
install_args (dict): arguments to do_install() from parent process.
"""
@@ -2049,7 +2050,7 @@ class BuildTask(object):
Instantiate a build task for a package.
Args:
- pkg (spack.package.Package): the package to be built and installed
+ pkg (spack.package_base.Package): the package to be built and installed
request (BuildRequest or None): the associated install request
where ``None`` can be used to indicate the package was
explicitly requested by the user
@@ -2062,7 +2063,7 @@ class BuildTask(object):
"""
# Ensure dealing with a package that has a concrete spec
- if not isinstance(pkg, spack.package.PackageBase):
+ if not isinstance(pkg, spack.package_base.PackageBase):
raise ValueError("{0} must be a package".format(str(pkg)))
self.pkg = pkg
@@ -2240,11 +2241,11 @@ class BuildRequest(object):
Instantiate a build request for a package.
Args:
- pkg (spack.package.Package): the package to be built and installed
+ pkg (spack.package_base.Package): the package to be built and installed
install_args (dict): the install arguments associated with ``pkg``
"""
# Ensure dealing with a package that has a concrete spec
- if not isinstance(pkg, spack.package.PackageBase):
+ if not isinstance(pkg, spack.package_base.PackageBase):
raise ValueError("{0} must be a package".format(str(pkg)))
self.pkg = pkg
@@ -2314,7 +2315,7 @@ class BuildRequest(object):
"""Determine the required dependency types for the associated package.
Args:
- pkg (spack.package.PackageBase): explicit or implicit package being
+ pkg (spack.package_base.PackageBase): explicit or implicit package being
installed
Returns:
@@ -2337,7 +2338,7 @@ class BuildRequest(object):
"""Determine if the tests should be run for the provided packages
Args:
- pkg (spack.package.PackageBase): explicit or implicit package being
+ pkg (spack.package_base.PackageBase): explicit or implicit package being
installed
Returns:
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 549c71f52b..84053d1cea 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -3,3078 +3,83 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""This is where most of the action happens in Spack.
+# flake8: noqa: F401
+"""spack.util.package is a set of useful build tools and directives for packages.
-The spack package class structure is based strongly on Homebrew
-(http://brew.sh/), mainly because Homebrew makes it very easy to create
-packages.
+Everything in this module is automatically imported into Spack package files.
"""
-
-import base64
-import collections
-import contextlib
-import copy
-import functools
-import glob
-import hashlib
-import inspect
-import os
-import re
-import shutil
-import sys
-import textwrap
-import time
-import traceback
-import types
-import warnings
-from typing import Any, Callable, Dict, List, Optional # novm
-
-import six
-
-import llnl.util.filesystem as fsys
-import llnl.util.tty as tty
-from llnl.util.lang import memoized, nullcontext
-from llnl.util.link_tree import LinkTree
-
-import spack.compilers
-import spack.config
-import spack.dependency
-import spack.directives
-import spack.directory_layout
-import spack.environment
-import spack.error
-import spack.fetch_strategy as fs
-import spack.hooks
-import spack.mirror
-import spack.mixins
-import spack.multimethod
-import spack.paths
-import spack.repo
-import spack.store
-import spack.url
-import spack.util.environment
-import spack.util.path
-import spack.util.web
-from spack.filesystem_view import YamlFilesystemView
-from spack.install_test import TestFailure, TestSuite
-from spack.installer import InstallError, PackageInstaller
-from spack.stage import ResourceStage, Stage, StageComposite, stage_prefix
-from spack.util.executable import ProcessError, which
-from spack.util.package_hash import package_hash
-from spack.util.prefix import Prefix
-from spack.version import Version
-
-"""Allowed URL schemes for spack packages."""
-_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
-
-
-# Filename for the Spack build/install log.
-_spack_build_logfile = 'spack-build-out.txt'
-
-# Filename for the Spack build/install environment file.
-_spack_build_envfile = 'spack-build-env.txt'
-
-# Filename for the Spack build/install environment modifications file.
-_spack_build_envmodsfile = 'spack-build-env-mods.txt'
-
-# Filename for the Spack install phase-time test log.
-_spack_install_test_log = 'install-time-test-log.txt'
-
-# Filename of json with total build and phase times (seconds)
-_spack_times_log = 'install_times.json'
-
-# Filename for the Spack configure args file.
-_spack_configure_argsfile = 'spack-configure-args.txt'
-
-
-def preferred_version(pkg):
- """
- Returns the preferred version of the package.
-
- Arguments:
- pkg (Package): The package whose versions are to be assessed.
- """
- # Here we sort first on the fact that a version is marked
- # as preferred in the package, then on the fact that the
- # version is not develop, then lexicographically
- key_fn = lambda v: (pkg.versions[v].get('preferred', False),
- not v.isdevelop(),
- v)
- return sorted(pkg.versions, key=key_fn).pop()
-
-
-class InstallPhase(object):
- """Manages a single phase of the installation.
-
- This descriptor stores at creation time the name of the method it should
- search for execution. The method is retrieved at __get__ time, so that
- it can be overridden by subclasses of whatever class declared the phases.
-
- It also provides hooks to execute arbitrary callbacks before and after
- the phase.
- """
-
- def __init__(self, name):
- self.name = name
- self.run_before = []
- self.run_after = []
-
- def __get__(self, instance, owner):
- # The caller is a class that is trying to customize
- # my behavior adding something
- if instance is None:
- return self
- # If instance is there the caller wants to execute the
- # install phase, thus return a properly set wrapper
- phase = getattr(instance, self.name)
-
- @functools.wraps(phase)
- def phase_wrapper(spec, prefix):
- # Check instance attributes at the beginning of a phase
- self._on_phase_start(instance)
- # Execute phase pre-conditions,
- # and give them the chance to fail
- for callback in self.run_before:
- callback(instance)
- phase(spec, prefix)
- # Execute phase sanity_checks,
- # and give them the chance to fail
- for callback in self.run_after:
- callback(instance)
- # Check instance attributes at the end of a phase
- self._on_phase_exit(instance)
- return phase_wrapper
-
- def _on_phase_start(self, instance):
- # If a phase has a matching stop_before_phase attribute,
- # stop the installation process raising a StopPhase
- if getattr(instance, 'stop_before_phase', None) == self.name:
- from spack.build_environment import StopPhase
- raise StopPhase('Stopping before \'{0}\' phase'.format(self.name))
-
- def _on_phase_exit(self, instance):
- # If a phase has a matching last_phase attribute,
- # stop the installation process raising a StopPhase
- if getattr(instance, 'last_phase', None) == self.name:
- from spack.build_environment import StopPhase
- raise StopPhase('Stopping at \'{0}\' phase'.format(self.name))
-
- def copy(self):
- try:
- return copy.deepcopy(self)
- except TypeError:
- # This bug-fix was not back-ported in Python 2.6
- # http://bugs.python.org/issue1515
- other = InstallPhase(self.name)
- other.run_before.extend(self.run_before)
- other.run_after.extend(self.run_after)
- return other
-
-
-#: Registers which are the detectable packages, by repo and package name
-#: Need a pass of package repositories to be filled.
-detectable_packages = collections.defaultdict(list)
-
-
-class DetectablePackageMeta(object):
- """Check if a package is detectable and add default implementations
- for the detection function.
- """
- def __init__(cls, name, bases, attr_dict):
- if hasattr(cls, 'executables') and hasattr(cls, 'libraries'):
- msg = "a package can have either an 'executables' or 'libraries' attribute"
- msg += " [package '{0.name}' defines both]"
- raise spack.error.SpackError(msg.format(cls))
-
- # On windows, extend the list of regular expressions to look for
- # filenames ending with ".exe"
- # (in some cases these regular expressions include "$" to avoid
- # pulling in filenames with unexpected suffixes, but this allows
- # for example detecting "foo.exe" when the package writer specified
- # that "foo" was a possible executable.
-
- # If a package has the executables or libraries attribute then it's
- # assumed to be detectable
- if hasattr(cls, 'executables') or hasattr(cls, 'libraries'):
- @property
- def platform_executables(self):
- def to_windows_exe(exe):
- if exe.endswith('$'):
- exe = exe.replace('$', '%s$' % spack.util.path.win_exe_ext())
- else:
- exe += spack.util.path.win_exe_ext()
- return exe
- plat_exe = []
- if hasattr(self, 'executables'):
- for exe in self.executables:
- if sys.platform == 'win32':
- exe = to_windows_exe(exe)
- plat_exe.append(exe)
- return plat_exe
-
- @classmethod
- def determine_spec_details(cls, prefix, objs_in_prefix):
- """Allow ``spack external find ...`` to locate installations.
-
- Args:
- prefix (str): the directory containing the executables
- or libraries
- objs_in_prefix (set): the executables or libraries that
- match the regex
-
- Returns:
- The list of detected specs for this package
- """
- objs_by_version = collections.defaultdict(list)
- # The default filter function is the identity function for the
- # list of executables
- filter_fn = getattr(cls, 'filter_detected_exes',
- lambda x, exes: exes)
- objs_in_prefix = filter_fn(prefix, objs_in_prefix)
- for obj in objs_in_prefix:
- try:
- version_str = cls.determine_version(obj)
- if version_str:
- objs_by_version[version_str].append(obj)
- except Exception as e:
- msg = ('An error occurred when trying to detect '
- 'the version of "{0}" [{1}]')
- tty.debug(msg.format(obj, str(e)))
-
- specs = []
- for version_str, objs in objs_by_version.items():
- variants = cls.determine_variants(objs, version_str)
- # Normalize output to list
- if not isinstance(variants, list):
- variants = [variants]
-
- for variant in variants:
- if isinstance(variant, six.string_types):
- variant = (variant, {})
- variant_str, extra_attributes = variant
- spec_str = '{0}@{1} {2}'.format(
- cls.name, version_str, variant_str
- )
-
- # Pop a few reserved keys from extra attributes, since
- # they have a different semantics
- external_path = extra_attributes.pop('prefix', None)
- external_modules = extra_attributes.pop(
- 'modules', None
- )
- try:
- spec = spack.spec.Spec(
- spec_str,
- external_path=external_path,
- external_modules=external_modules
- )
- except Exception as e:
- msg = 'Parsing failed [spec_str="{0}", error={1}]'
- tty.debug(msg.format(spec_str, str(e)))
- else:
- specs.append(spack.spec.Spec.from_detection(
- spec, extra_attributes=extra_attributes
- ))
-
- return sorted(specs)
-
- @classmethod
- def determine_variants(cls, objs, version_str):
- return ''
-
- # Register the class as a detectable package
- detectable_packages[cls.namespace].append(cls.name)
-
- # Attach function implementations to the detectable class
- default = False
- if not hasattr(cls, 'determine_spec_details'):
- default = True
- cls.determine_spec_details = determine_spec_details
-
- if default and not hasattr(cls, 'determine_version'):
- msg = ('the package "{0}" in the "{1}" repo needs to define'
- ' the "determine_version" method to be detectable')
- raise NotImplementedError(msg.format(cls.name, cls.namespace))
-
- if default and not hasattr(cls, 'determine_variants'):
- cls.determine_variants = determine_variants
-
- # This function should not be overridden by subclasses,
- # as it is not designed for bespoke pkg detection but rather
- # on a per-platform basis
- if 'platform_executables' in cls.__dict__.keys():
- raise PackageError("Packages should not override platform_executables")
- cls.platform_executables = platform_executables
-
- super(DetectablePackageMeta, cls).__init__(name, bases, attr_dict)
-
-
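- # Illustrative sketch (not part of this diff): a package opts into
- # detection by `spack external find` simply by defining `executables`
- # (or `libraries`) and a `determine_version` classmethod; the metaclass
- # above then attaches `determine_spec_details`. The class name and
- # regexes below are assumptions for illustration only; `Executable`
- # and `re` are assumed to be in scope, as they usually are in
- # package files.
- #
- #     class Cmake(Package):
- #         executables = ['^cmake$']
- #
- #         @classmethod
- #         def determine_version(cls, exe):
- #             output = Executable(exe)('--version', output=str, error=str)
- #             match = re.search(r'cmake version (\S+)', output)
- #             return match.group(1) if match else None
-
-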
-class PackageMeta(
- DetectablePackageMeta,
- spack.directives.DirectiveMeta,
- spack.mixins.PackageMixinsMeta,
- spack.multimethod.MultiMethodMeta
-):
- """
- Package metaclass for supporting directives (e.g., depends_on) and phases
- """
- phase_fmt = '_InstallPhase_{0}'
-
- # These are accessed only through getattr, by name
- _InstallPhase_run_before = {} # type: Dict[str, List[Callable]]
- _InstallPhase_run_after = {} # type: Dict[str, List[Callable]]
-
- def __new__(cls, name, bases, attr_dict):
- """
- Instance creation is preceded by phase attribute transformations.
-
- Conveniently transforms attributes to permit extensible phases by
- iterating over the attribute 'phases' and creating / updating private
- InstallPhase attributes in the class that will be initialized in
- __init__.
- """
- if 'phases' in attr_dict:
- # Turn the strings in 'phases' into InstallPhase instances
- # and add them as private attributes
- _InstallPhase_phases = [PackageMeta.phase_fmt.format(x) for x in attr_dict['phases']] # NOQA: ignore=E501
- for phase_name, callback_name in zip(_InstallPhase_phases, attr_dict['phases']): # NOQA: ignore=E501
- attr_dict[phase_name] = InstallPhase(callback_name)
- attr_dict['_InstallPhase_phases'] = _InstallPhase_phases
-
- def _flush_callbacks(check_name):
- # Name of the attribute whose existence we check
- check_attr = PackageMeta.phase_fmt.format(check_name)
- checks = getattr(cls, check_attr)
- if checks:
- for phase_name, funcs in checks.items():
- phase_attr = PackageMeta.phase_fmt.format(phase_name)
- try:
- # Search for the phase in the attribute dictionary
- phase = attr_dict[phase_attr]
- except KeyError:
- # If it's not there, the phase is inherited from a base
- # class and we added a check, so we copy it and
- # extend the copy
- for base in bases:
- phase = getattr(base, phase_attr, None)
- if phase is not None:
- break
-
- phase = attr_dict[phase_attr] = phase.copy()
- getattr(phase, check_name).extend(funcs)
- # Clear the attribute for the next class
- setattr(cls, check_attr, {})
-
- _flush_callbacks('run_before')
- _flush_callbacks('run_after')
-
- # Reset names for packages that inherit from another
- # package with a different name
- attr_dict['_name'] = None
-
- return super(PackageMeta, cls).__new__(cls, name, bases, attr_dict)
-
- @staticmethod
- def register_callback(check_type, *phases):
- def _decorator(func):
- attr_name = PackageMeta.phase_fmt.format(check_type)
- check_list = getattr(PackageMeta, attr_name)
- for item in phases:
- checks = check_list.setdefault(item, [])
- checks.append(func)
- setattr(PackageMeta, attr_name, check_list)
- return func
- return _decorator
-
- @property
- def package_dir(self):
- """Directory where the package.py file lives."""
- return os.path.abspath(os.path.dirname(self.module.__file__))
-
- @property
- def module(self):
- """Module object (not just the name) that this package is defined in.
-
- We use this to add variables to package modules. This makes
- install() methods easier to write (e.g., can call configure())
- """
- return __import__(self.__module__, fromlist=[self.__name__])
-
- @property
- def namespace(self):
- """Spack namespace for the package, which identifies its repo."""
- return spack.repo.namespace_from_fullname(self.__module__)
-
- @property
- def fullname(self):
- """Name of this package, including the namespace"""
- return '%s.%s' % (self.namespace, self.name)
-
- @property
- def fullnames(self):
- """
- Fullnames for this package and any packages from which it inherits.
- """
- fullnames = []
- for cls in inspect.getmro(self):
- namespace = getattr(cls, 'namespace', None)
- if namespace:
- fullnames.append('%s.%s' % (namespace, self.name))
- if namespace == 'builtin':
- # builtin packages cannot inherit from other repos
- break
- return fullnames
-
- @property
- def name(self):
- """The name of this package.
-
- The name of a package is the name of its Python module, without
- the containing module names.
- """
- if self._name is None:
- self._name = self.module.__name__
- if '.' in self._name:
- self._name = self._name[self._name.rindex('.') + 1:]
- return self._name
-
- @property
- def global_license_dir(self):
- """Returns the directory where license files for all packages are stored."""
- return spack.util.path.canonicalize_path(spack.config.get('config:license_dir'))
-
-
-def run_before(*phases):
- """Registers a method of a package to be run before a given phase"""
- return PackageMeta.register_callback('run_before', *phases)
-
-
-def run_after(*phases):
- """Registers a method of a package to be run after a given phase"""
- return PackageMeta.register_callback('run_after', *phases)
-
-
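- # Illustrative sketch (not part of this diff): packages use these
- # decorators to hook extra work onto an installation phase; the class
- # and method names are hypothetical.
- #
- #     class Example(Package):
- #         @run_after('install')
- #         def check_install(self):
- #             # runs in the build environment after the install phase
- #             assert os.path.exists(self.prefix.bin)
-
-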
-def on_package_attributes(**attr_dict):
- """Decorator: executes instance function only if object has attr valuses.
-
- Executes the decorated method only if at the moment of calling the
- instance has attributes that are equal to certain values.
-
- Args:
- attr_dict (dict): dictionary mapping attribute names to their
- required values
- """
- def _execute_under_condition(func):
-
- @functools.wraps(func)
- def _wrapper(instance, *args, **kwargs):
- # If all the attributes have the value we require, then execute
- has_all_attributes = all(
- [hasattr(instance, key) for key in attr_dict]
- )
- if has_all_attributes:
- has_the_right_values = all(
- [getattr(instance, key) == value for key, value in attr_dict.items()] # NOQA: ignore=E501
- )
- if has_the_right_values:
- func(instance, *args, **kwargs)
- return _wrapper
-
- return _execute_under_condition
-
-
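- # Illustrative sketch (not part of this diff): combined with run_after,
- # this lets a check run only when tests were requested; the method name
- # is hypothetical.
- #
- #     @run_after('build')
- #     @on_package_attributes(run_tests=True)
- #     def check_build(self):
- #         make('check')
-
-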
-class PackageViewMixin(object):
- """This collects all functionality related to adding installed Spack
- package to views. Packages can customize how they are added to views by
- overriding these functions.
- """
- def view_source(self):
- """The source root directory that will be added to the view: files are
- added such that their path relative to the view destination matches
- their path relative to the view source.
- """
- return self.spec.prefix
-
- def view_destination(self, view):
- """The target root directory: each file is added relative to this
- directory.
- """
- return view.get_projection_for_spec(self.spec)
-
- def view_file_conflicts(self, view, merge_map):
- """Report any files which prevent adding this package to the view. The
- default implementation looks for any files which already exist.
- Alternative implementations may allow some of the files to exist in
- the view (in this case they would be omitted from the results).
- """
- return set(dst for dst in merge_map.values() if os.path.lexists(dst))
-
- def add_files_to_view(self, view, merge_map, skip_if_exists=True):
- """Given a map of package files to destination paths in the view, add
- the files to the view. By default this adds all files. Alternative
- implementations may skip some files, for example if other packages
- linked into the view already include the file.
-
- Args:
- view (spack.filesystem_view.FilesystemView): the view that's updated
- merge_map (dict): maps absolute source paths to absolute dest paths for
- all files from this package.
- skip_if_exists (bool): when True, don't link files in view when they
- already exist. When False, always link files, without checking
- if they already exist.
- """
- if skip_if_exists:
- for src, dst in merge_map.items():
- if not os.path.lexists(dst):
- view.link(src, dst, spec=self.spec)
- else:
- for src, dst in merge_map.items():
- view.link(src, dst, spec=self.spec)
-
- def remove_files_from_view(self, view, merge_map):
- """Given a map of package files to files currently linked in the view,
- remove the files from the view. The default implementation removes all
- files. Alternative implementations may not remove all files. For
- example if two packages include the same file, it should only be
- removed when both packages are removed.
- """
- view.remove_files(merge_map.values())
-
-
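- # Illustrative sketch (not part of this diff): a package can customize
- # how it is linked into views by overriding the hooks above, e.g. to
- # skip its documentation tree; names and paths are assumptions.
- #
- #     class Example(Package):
- #         def add_files_to_view(self, view, merge_map, skip_if_exists=True):
- #             merge_map = {src: dst for src, dst in merge_map.items()
- #                          if 'share/doc' not in src}
- #             super(Example, self).add_files_to_view(
- #                 view, merge_map, skip_if_exists)
-
-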
-def test_log_pathname(test_stage, spec):
- """Build the pathname of the test log file
-
- Args:
- test_stage (str): path to the test stage directory
- spec (spack.spec.Spec): instance of the spec under test
-
- Returns:
- (str): the pathname of the test log file
- """
- return os.path.join(test_stage,
- 'test-{0}-out.txt'.format(TestSuite.test_pkg_id(spec)))
-
-
-class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
- """This is the superclass for all spack packages.
-
- ***The Package class***
-
- At its core, a package consists of a set of software to be installed.
- A package may focus on a piece of software and its associated software
- dependencies or it may simply be a set, or bundle, of software. The
- former requires defining how to fetch, verify (via, e.g., sha256), build,
- and install that software and the packages it depends on, so that
- dependencies can be installed along with the package itself. The latter,
- sometimes referred to as a ``no-source`` package, requires only defining
- the packages to be built.
-
- Packages are written in pure Python.
-
- There are two main parts of a Spack package:
-
- 1. **The package class**. Classes contain ``directives``, which are
- special functions, that add metadata (versions, patches,
- dependencies, and other information) to packages (see
- ``directives.py``). Directives provide the constraints that are
- used as input to the concretizer.
-
- 2. **Package instances**. Once instantiated, a package is
- essentially a software installer. Spack calls methods like
- ``do_install()`` on the ``Package`` object, and it uses those to
- drive user-implemented methods like ``patch()``, ``install()``, and
- other build steps. To install software, an instantiated package
- needs a *concrete* spec, which guides the behavior of the various
- install methods.
-
- Packages are imported from repos (see ``repo.py``).
-
- **Package DSL**
-
- Look in ``lib/spack/docs`` or check https://spack.readthedocs.io for
- the full documentation of the package domain-specific language. That
- used to be partially documented here, but as it grew, the docs here
- became increasingly out of date.
-
- **Package Lifecycle**
-
- A package's lifecycle over a run of Spack looks something like this:
-
- .. code-block:: python
-
- p = Package() # Done for you by spack
-
- p.do_fetch() # downloads tarball from a URL (or VCS)
- p.do_stage() # expands tarball in a temp directory
- p.do_patch() # applies patches to expanded source
- p.do_install() # calls package's install() function
- p.do_uninstall() # removes install directory
-
- although packages without code have nothing to fetch and so omit
- ``p.do_fetch()``.
-
- There are also some other commands that clean the build area:
-
- .. code-block:: python
-
- p.do_clean() # removes the stage directory entirely
- p.do_restage() # removes the build directory and
- # re-expands the archive.
-
- The convention used here is that a ``do_*`` function is intended to be
- called internally by Spack commands (in ``spack.cmd``). These aren't for
- package writers to override, and doing so may break the functionality
- of the Package class.
-
- Package creators have a lot of freedom, and they could technically
- override anything in this class. That is not usually required.
-
- For most use cases, package creators typically just add attributes
- like ``homepage`` and, for a code-based package, ``url``, or functions
- such as ``install()``.
- There are many custom ``Package`` subclasses in the
- ``spack.build_systems`` package that make things even easier for
- specific build systems.
-
- """
- #
- # These are default values for instance variables.
- #
-
- #: A list or set of build time test functions to be called when tests
- #: are executed or 'None' if there are no such test functions.
- build_time_test_callbacks = None # type: Optional[List[str]]
-
- #: By default, packages are not virtual
- #: Virtual packages override this attribute
- virtual = False
-
- #: Most Spack packages are used to install source or binary code, while
- #: those without code can be used to install a set of other Spack packages.
- has_code = True
-
- #: A list or set of install time test functions to be called when tests
- #: are executed or 'None' if there are no such test functions.
- install_time_test_callbacks = None # type: Optional[List[str]]
-
- #: By default we build in parallel. Subclasses can override this.
- parallel = True
-
- #: By default do not run tests within package's install()
- run_tests = False
-
- # FIXME: this is a bad object-oriented design, should be moved to Clang.
- #: By default do not set up a mock XCode on macOS with Clang
- use_xcode = False
-
- #: Most packages are NOT extendable. Set to True if you want extensions.
- extendable = False
-
- #: When True, add RPATHs for the entire DAG. When False, add RPATHs only
- #: for immediate dependencies.
- transitive_rpaths = True
-
- #: List of prefix-relative file paths (or a single path). If these do
- #: not exist after install, or if they exist but are not files,
- #: sanity checks fail.
- sanity_check_is_file = [] # type: List[str]
-
- #: List of prefix-relative directory paths (or a single path). If
- #: these do not exist after install, or if they exist but are not
- #: directories, sanity checks will fail.
- sanity_check_is_dir = [] # type: List[str]
-
- #: List of glob expressions. Each expression must either be
- #: absolute or relative to the package source path.
- #: Matching artifacts found at the end of the build process will be
- #: copied in the same directory tree as _spack_build_logfile and
- #: _spack_build_envfile.
- archive_files = [] # type: List[str]
-
- #: Boolean. Set to ``True`` for packages that require a manual download.
- #: This is currently used by package sanity tests and generation of a
- #: more meaningful fetch failure error.
- manual_download = False
-
- #: Set of additional options used when fetching package versions.
- fetch_options = {} # type: Dict[str, Any]
-
- #
- # Set default licensing information
- #
- #: Boolean. If set to ``True``, this software requires a license.
- #: If set to ``False``, all of the ``license_*`` attributes will
- #: be ignored. Defaults to ``False``.
- license_required = False
-
- #: String. Contains the symbol used by the license manager to denote
- #: a comment. Defaults to ``#``.
- license_comment = '#'
-
- #: List of strings. These are files that the software searches for when
- #: looking for a license. All file paths must be relative to the
- #: installation directory. More complex packages like Intel may require
- #: multiple licenses for individual components. Defaults to the empty list.
- license_files = [] # type: List[str]
-
- #: List of strings. Environment variables that can be set to tell the
- #: software where to look for a license if it is not in the usual location.
- #: Defaults to the empty list.
- license_vars = [] # type: List[str]
-
- #: String. A URL pointing to license setup instructions for the software.
- #: Defaults to the empty string.
- license_url = ''
-
- #: Verbosity level, preserved across installs.
- _verbose = None
-
- #: index of patches by sha256 sum, built lazily
- _patches_by_hash = None
-
- #: Package homepage where users can find more information about the package
- homepage = None # type: str
-
- #: Default list URL (place to find available versions)
- list_url = None # type: str
-
- #: Link depth to which list_url should be searched for new versions
- list_depth = 0
-
- #: List of strings which contains GitHub usernames of package maintainers.
- #: Do not include @ here in order not to unnecessarily ping the users.
- maintainers = [] # type: List[str]
-
- #: List of attributes to be excluded from a package's hash.
- metadata_attrs = [
- "homepage",
- "url",
- "urls",
- "list_url",
- "extendable",
- "parallel",
- "make_jobs",
- "maintainers",
- "tags",
- ]
-
- #: Boolean. If set to ``True``, the smoke/install test requires a compiler.
- #: This is currently used by smoke tests to ensure a compiler is available
- #: to build a custom test code.
- test_requires_compiler = False
-
- #: List of test failures encountered during a smoke/install test run.
- test_failures = None
-
- #: TestSuite instance used to manage smoke/install tests for one or more
- #: specs.
- test_suite = None
-
- def __init__(self, spec):
- # this determines how the package should be built.
- self.spec = spec
-
- # Allow custom staging paths for packages
- self.path = None
-
- # Keep track of whether or not this package was installed from
- # a binary cache.
- self.installed_from_binary_cache = False
-
- # Ensure that the package does not define both 'url' and 'urls'
- if getattr(self, 'url', None) and getattr(self, 'urls', None):
- msg = "a package can have either a 'url' or a 'urls' attribute"
- msg += " [package '{0.name}' defines both]"
- raise ValueError(msg.format(self))
-
- # init internal variables
- self._stage = None
- self._fetcher = None
-
- # Set up timing variables
- self._fetch_time = 0.0
-
- if self.is_extension:
- spack.repo.get(self.extendee_spec)._check_extendable()
-
- super(PackageBase, self).__init__()
-
- @classmethod
- def possible_dependencies(
- cls, transitive=True, expand_virtuals=True, deptype='all',
- visited=None, missing=None, virtuals=None):
- """Return dict of possible dependencies of this package.
-
- Args:
- transitive (bool or None): return all transitive dependencies if
- True, only direct dependencies if False (default True).
- expand_virtuals (bool or None): expand virtual dependencies into
- all possible implementations (default True)
- deptype (str or tuple or None): dependency types to consider
- visited (dict or None): dict of names of dependencies visited so
- far, mapped to their immediate dependencies' names.
- missing (dict or None): dict to populate with packages and their
- *missing* dependencies.
- virtuals (set): if provided, populate with virtuals seen so far.
-
- Returns:
- (dict): dictionary mapping dependency names to *their*
- immediate dependencies
-
- Each item in the returned dictionary maps a (potentially
- transitive) dependency of this package to its possible
- *immediate* dependencies. If ``expand_virtuals`` is ``False``,
- virtual package names will be inserted as keys mapped to empty
- sets of dependencies. Virtuals, if not expanded, are treated as
- though they have no immediate dependencies.
-
- Missing dependencies by default are ignored, but if a
- missing dict is provided, it will be populated with package names
- mapped to any dependencies they have that are in no
- repositories. This is only populated if transitive is True.
-
- Note: the returned dict *includes* the package itself.
-
- """
- deptype = spack.dependency.canonical_deptype(deptype)
-
- visited = {} if visited is None else visited
- missing = {} if missing is None else missing
-
- visited.setdefault(cls.name, set())
-
- for name, conditions in cls.dependencies.items():
- # check whether this dependency could be of the type asked for
- types = [dep.type for cond, dep in conditions.items()]
- types = set.union(*types)
- if not any(d in types for d in deptype):
- continue
-
- # expand virtuals if enabled, otherwise just stop at virtuals
- if spack.repo.path.is_virtual(name):
- if virtuals is not None:
- virtuals.add(name)
- if expand_virtuals:
- providers = spack.repo.path.providers_for(name)
- dep_names = [spec.name for spec in providers]
- else:
- visited.setdefault(cls.name, set()).add(name)
- visited.setdefault(name, set())
- continue
- else:
- dep_names = [name]
-
- # add the dependency names to the visited dict
- visited.setdefault(cls.name, set()).update(set(dep_names))
-
- # recursively traverse dependencies
- for dep_name in dep_names:
- if dep_name in visited:
- continue
-
- visited.setdefault(dep_name, set())
-
- # skip the rest if not transitive
- if not transitive:
- continue
-
- try:
- dep_cls = spack.repo.path.get_pkg_class(dep_name)
- except spack.repo.UnknownPackageError:
- # log unknown packages
- missing.setdefault(cls.name, set()).add(dep_name)
- continue
-
- dep_cls.possible_dependencies(
- transitive, expand_virtuals, deptype, visited, missing,
- virtuals)
-
- return visited
-
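- # Illustrative sketch (not part of this diff): querying the possible
- # dependency graph of a package class; 'mpileaks' is an example name.
- #
- #     pkg_cls = spack.repo.path.get_pkg_class('mpileaks')
- #     deps = pkg_cls.possible_dependencies(deptype=('link', 'run'))
- #     # 'deps' maps each (transitive) dependency name, including
- #     # 'mpileaks' itself, to the names of its immediate dependencies.
-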
- def enum_constraints(self, visited=None):
- """Return transitive dependency constraints on this package."""
- if visited is None:
- visited = set()
- visited.add(self.name)
-
- names = []
- clauses = []
-
- for name in self.dependencies:
- if name not in visited and not spack.spec.Spec(name).virtual:
- pkg = spack.repo.get(name)
- dvis, dnames, dclauses = pkg.enum_constraints(visited)
- visited |= dvis
- names.extend(dnames)
- clauses.extend(dclauses)
-
- # return all three values so the recursive call above can unpack them
- return visited, names, clauses
-
- # package_dir and module are *class* properties (see PackageMeta),
- # but to make them work on instances we need these defs as well.
- @property
- def package_dir(self):
- """Directory where the package.py file lives."""
- return type(self).package_dir
-
- @property
- def module(self):
- """Module object that this package is defined in."""
- return type(self).module
-
- @property
- def namespace(self):
- """Spack namespace for the package, which identifies its repo."""
- return type(self).namespace
-
- @property
- def fullname(self):
- """Name of this package, including namespace: namespace.name."""
- return type(self).fullname
-
- @property
- def fullnames(self):
- return type(self).fullnames
-
- @property
- def name(self):
- """Name of this package (the module without parent modules)."""
- return type(self).name
-
- @property
- def global_license_dir(self):
- """Returns the directory where global license files are stored."""
- return type(self).global_license_dir
-
- @property
- def global_license_file(self):
- """Returns the path where a global license file for this
- particular package should be stored."""
- if not self.license_files:
- return
- return os.path.join(self.global_license_dir, self.name,
- os.path.basename(self.license_files[0]))
-
- @property
- def version(self):
- if not self.spec.versions.concrete:
- raise ValueError("Version requested for a package that"
- " does not have a concrete version.")
- return self.spec.versions[0]
-
- @memoized
- def version_urls(self):
- """OrderedDict of explicitly defined URLs for versions of this package.
-
- Return:
- An OrderedDict (version -> URL) for different versions of this
- package, sorted by version.
-
- A version's URL only appears in the result if it has an
- explicitly defined ``url`` argument. So, this list may be empty
- if a package only defines ``url`` at the top level.
- """
- version_urls = collections.OrderedDict()
- for v, args in sorted(self.versions.items()):
- if 'url' in args:
- version_urls[v] = args['url']
- return version_urls
-
- def nearest_url(self, version):
- """Finds the URL with the "closest" version to ``version``.
-
- This uses the following precedence order:
-
- 1. Find the next lowest or equal version with a URL.
- 2. If no lower URL, return the next *higher* URL.
- 3. If no higher URL, return None.
-
- """
- version_urls = self.version_urls()
-
- if version in version_urls:
- return version_urls[version]
-
- last_url = None
- for v, u in self.version_urls().items():
- if v > version:
- if last_url:
- return last_url
- last_url = u
-
- return last_url
-
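- # For example, if a package defines explicit URLs only for versions
- # 1.0 and 2.0, then nearest_url(Version('1.5')) returns the 1.0 URL
- # (next lowest), and nearest_url(Version('0.5')) also returns the
- # 1.0 URL, since there is no lower version with a URL.
-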
- def url_for_version(self, version):
- """Returns a URL from which the specified version of this package
- may be downloaded.
-
- version: class Version
- The version for which a URL is sought.
-
- See Class Version (version.py)
- """
- return self._implement_all_urls_for_version(version)[0]
-
- def all_urls_for_version(self, version, custom_url_for_version=None):
- """Returns all URLs derived from version_urls(), url, urls, and
- list_url (if it contains a version) in a package in that order.
-
- version: class Version
- The version for which a URL is sought.
-
- See Class Version (version.py)
- """
- uf = None
- if type(self).url_for_version != Package.url_for_version:
- uf = self.url_for_version
- return self._implement_all_urls_for_version(version, uf)
-
- def _implement_all_urls_for_version(self, version, custom_url_for_version=None):
- if not isinstance(version, Version):
- version = Version(version)
-
- urls = []
-
- # If we have a specific URL for this version, don't extrapolate.
- version_urls = self.version_urls()
- if version in version_urls:
- urls.append(version_urls[version])
-
- # if there is a custom url_for_version, use it
- if custom_url_for_version is not None:
- u = custom_url_for_version(version)
- if u not in urls and u is not None:
- urls.append(u)
-
- def sub_and_add(u):
- if u is None:
- return
- # skip the url if there is no version to replace
- try:
- spack.url.parse_version(u)
- except spack.url.UndetectableVersionError:
- return
- nu = spack.url.substitute_version(u, self.url_version(version))
-
- urls.append(nu)
- # If no specific URL, use the default, class-level URL
- sub_and_add(getattr(self, 'url', None))
- for u in getattr(self, 'urls', []):
- sub_and_add(u)
-
- sub_and_add(getattr(self, 'list_url', None))
-
- # if no version-bearing URLs can be found, try them raw
- if not urls:
- default_url = getattr(self, "url", getattr(self, "urls", [None])[0])
-
- # if no exact match AND no class-level default, use the nearest URL
- if not default_url:
- default_url = self.nearest_url(version)
-
- # if there are NO URLs to go by, then we can't do anything
- if not default_url:
- raise NoURLError(self.__class__)
- urls.append(
- spack.url.substitute_version(
- default_url, self.url_version(version)
- )
- )
-
- return urls
-
- def find_valid_url_for_version(self, version):
- """Returns a URL from which the specified version of this package
- may be downloaded after testing whether the url is valid. Will try
- url, urls, and list_url before failing.
-
- version: class Version
- The version for which a URL is sought.
-
- See Class Version (version.py)
- """
- urls = self.all_urls_for_version(version)
-
- for u in urls:
- if spack.util.web.url_exists(u):
- return u
-
- return None
-
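- # Illustrative sketch (not part of this diff): a package whose download
- # layout changed between releases can override url_for_version(); the
- # project name and URLs below are assumptions for illustration only.
- #
- #     class Example(Package):
- #         def url_for_version(self, version):
- #             if version < Version('2.0'):
- #                 return 'https://example.com/old/example-%s.tar.gz' % version
- #             return 'https://example.com/example-%s.tar.gz' % version
-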
- def _make_resource_stage(self, root_stage, fetcher, resource):
- resource_stage_folder = self._resource_stage(resource)
- mirror_paths = spack.mirror.mirror_archive_paths(
- fetcher,
- os.path.join(self.name, "%s-%s" % (resource.name, self.version)))
- stage = ResourceStage(resource.fetcher,
- root=root_stage,
- resource=resource,
- name=resource_stage_folder,
- mirror_paths=mirror_paths,
- path=self.path)
- return stage
-
- def _download_search(self):
- dynamic_fetcher = fs.from_list_url(self)
- return [dynamic_fetcher] if dynamic_fetcher else []
-
- def _make_root_stage(self, fetcher):
- # Construct a mirror path (TODO: get this out of package.py)
- mirror_paths = spack.mirror.mirror_archive_paths(
- fetcher,
- os.path.join(self.name, "%s-%s" % (self.name, self.version)),
- self.spec)
- # Construct a path where the stage should build.
- s = self.spec
- stage_name = "{0}{1}-{2}-{3}".format(stage_prefix, s.name, s.version,
- s.dag_hash())
-
- stage = Stage(fetcher, mirror_paths=mirror_paths, name=stage_name,
- path=self.path, search_fn=self._download_search)
- return stage
-
- def _make_stage(self):
- # If it's a dev package (not transitively), use a DIY stage object
- dev_path_var = self.spec.variants.get('dev_path', None)
- if dev_path_var:
- return spack.stage.DIYStage(dev_path_var.value)
-
- # Construct a composite stage on top of the composite FetchStrategy
- composite_fetcher = self.fetcher
- composite_stage = StageComposite()
- resources = self._get_needed_resources()
- for ii, fetcher in enumerate(composite_fetcher):
- if ii == 0:
- # Construct root stage first
- stage = self._make_root_stage(fetcher)
- else:
- # Construct resource stage
- resource = resources[ii - 1] # ii == 0 is root!
- stage = self._make_resource_stage(composite_stage[0], fetcher,
- resource)
- # Append the item to the composite
- composite_stage.append(stage)
-
- return composite_stage
-
- @property
- def stage(self):
- """Get the build staging area for this package.
-
- This automatically instantiates a ``Stage`` object if the package
- doesn't have one yet, but it does not create the Stage directory
- on the filesystem.
- """
- if not self.spec.versions.concrete:
- raise ValueError(
- "Cannot retrieve stage for package without concrete version.")
- if self._stage is None:
- self._stage = self._make_stage()
- return self._stage
-
- @stage.setter
- def stage(self, stage):
- """Allow a stage object to be set to override the default."""
- self._stage = stage
-
- @property
- def env_path(self):
- """Return the build environment file path associated with staging."""
- # Backward compatibility: Return the name of an existing log path;
- # otherwise, return the current install env path name.
- old_filename = os.path.join(self.stage.path, 'spack-build.env')
- if os.path.exists(old_filename):
- return old_filename
- else:
- return os.path.join(self.stage.path, _spack_build_envfile)
-
- @property
- def env_mods_path(self):
- """
- Return the build environment modifications file path associated with
- staging.
- """
- return os.path.join(self.stage.path, _spack_build_envmodsfile)
-
- @property
- def metadata_dir(self):
- """Return the install metadata directory."""
- return spack.store.layout.metadata_path(self.spec)
-
- @property
- def install_env_path(self):
- """
- Return the build environment file path on successful installation.
- """
- # Backward compatibility: Return the name of an existing log path;
- # otherwise, return the current install env path name.
- old_filename = os.path.join(self.metadata_dir, 'build.env')
- if os.path.exists(old_filename):
- return old_filename
- else:
- return os.path.join(self.metadata_dir, _spack_build_envfile)
-
- @property
- def log_path(self):
- """Return the build log file path associated with staging."""
- # Backward compatibility: Return the name of an existing log path.
- for filename in ['spack-build.out', 'spack-build.txt']:
- old_log = os.path.join(self.stage.path, filename)
- if os.path.exists(old_log):
- return old_log
-
- # Otherwise, return the current log path name.
- return os.path.join(self.stage.path, _spack_build_logfile)
-
- @property
- def phase_log_files(self):
- """Find sorted phase log files written to the staging directory"""
- logs_dir = os.path.join(self.stage.path, "spack-build-*-out.txt")
- log_files = glob.glob(logs_dir)
- log_files.sort()
- return log_files
-
- @property
- def install_log_path(self):
- """Return the build log file path on successful installation."""
- # Backward compatibility: Return the name of an existing install log.
- for filename in ['build.out', 'build.txt']:
- old_log = os.path.join(self.metadata_dir, filename)
- if os.path.exists(old_log):
- return old_log
-
- # Otherwise, return the current install log path name.
- return os.path.join(self.metadata_dir, _spack_build_logfile)
-
- @property
- def configure_args_path(self):
- """Return the configure args file path associated with staging."""
- return os.path.join(self.stage.path, _spack_configure_argsfile)
-
- @property
- def test_install_log_path(self):
- """Return the install phase-time test log file path, if set."""
- return getattr(self, 'test_log_file', None)
-
- @property
- def install_test_install_log_path(self):
- """Return the install location for the install phase-time test log."""
- return fsys.join_path(self.metadata_dir, _spack_install_test_log)
-
- @property
- def times_log_path(self):
- """Return the times log json file."""
- return os.path.join(self.metadata_dir, _spack_times_log)
-
- @property
- def install_configure_args_path(self):
- """Return the configure args file path on successful installation."""
- return os.path.join(self.metadata_dir, _spack_configure_argsfile)
-
- @property
- def install_test_root(self):
- """Return the install test root directory."""
- return os.path.join(self.metadata_dir, 'test')
-
- @property
- def installed(self):
- msg = ('the "PackageBase.installed" property is deprecated and will be '
- 'removed in Spack v0.19, use "Spec.installed" instead')
- warnings.warn(msg)
- return self.spec.installed
-
- @property
- def installed_upstream(self):
- msg = ('the "PackageBase.installed_upstream" property is deprecated and will '
- 'be removed in Spack v0.19, use "Spec.installed_upstream" instead')
- warnings.warn(msg)
- return self.spec.installed_upstream
-
- def _make_fetcher(self):
- # Construct a composite fetcher that always contains at least
- # one element (the root package). In case there are resources
- # associated with the package, append their fetcher to the
- # composite.
- root_fetcher = fs.for_package_version(self, self.version)
- fetcher = fs.FetchStrategyComposite() # Composite fetcher
- fetcher.append(root_fetcher) # Root fetcher is always present
- resources = self._get_needed_resources()
- for resource in resources:
- fetcher.append(resource.fetcher)
- return fetcher
-
- @property
- def fetcher(self):
- if not self.spec.versions.concrete:
- raise ValueError("Cannot retrieve fetcher for"
- " package without concrete version.")
- if not self._fetcher:
- self._fetcher = self._make_fetcher()
- return self._fetcher
-
- @fetcher.setter
- def fetcher(self, f):
- self._fetcher = f
-
- def dependencies_of_type(self, *deptypes):
- """Get dependencies that can possibly have these deptypes.
-
- This analyzes the package and determines which dependencies *can*
- be a certain kind of dependency. Note that they may not *always*
- be this kind of dependency, since dependencies can be optional,
- so something may be a build dependency in one configuration and a
- run dependency in another.
- """
- return dict(
- (name, conds) for name, conds in self.dependencies.items()
- if any(dt in self.dependencies[name][cond].type
- for cond in conds for dt in deptypes))
-
- @property
- def extendee_spec(self):
- """
- Spec of the extendee of this package, or None if it is not an extension
- """
- if not self.extendees:
- return None
-
- deps = []
-
- # If the extendee is in the spec's deps already, return that.
- for dep in self.spec.traverse(deptype=('link', 'run')):
- if dep.name in self.extendees:
- deps.append(dep)
-
- # TODO: allow more than one active extendee.
- if deps:
- assert len(deps) == 1
- return deps[0]
-
- # if the spec is concrete already, then it extends something
- # that is an *optional* dependency, and the dep isn't there.
- if self.spec._concrete:
- return None
- else:
- # TODO: do something sane here with more than one extendee
- # If it's not concrete, then return the spec from the
- # extends() directive since that is all we know so far.
- spec, kwargs = next(iter(self.extendees.items()))
- return spec
-
- @property
- def extendee_args(self):
- """
- Arguments given to the ``extends()`` directive for this
- package's extendee, or None if it is not an extension
- """
- if not self.extendees:
- return None
-
- # TODO: allow multiple extendees.
- name = next(iter(self.extendees))
- return self.extendees[name][1]
-
- @property
- def is_extension(self):
- # if it is concrete, it's only an extension if it actually
- # depends on the extendee.
- if self.spec._concrete:
- return self.extendee_spec is not None
- else:
- # If not, then it's an extension if it *could* be an extension
- return bool(self.extendees)
-
- def extends(self, spec):
- '''
- Returns True if this package extends the given spec.
-
- If ``self.spec`` is concrete, this returns whether this package extends
- the given spec.
-
- If ``self.spec`` is not concrete, this returns whether this package may
- extend the given spec.
- '''
- if spec.name not in self.extendees:
- return False
- s = self.extendee_spec
- return s and spec.satisfies(s)
-
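- # Illustrative sketch (not part of this diff): a package becomes an
- # extension via the extends() directive, which populates the
- # 'extendees' attribute used above; the names are hypothetical.
- #
- #     class LuaExample(Package):
- #         extends('lua')
-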
- def is_activated(self, view):
- """Return True if package is activated."""
- if not self.is_extension:
- raise ValueError(
- "is_activated called on package that is not an extension.")
- if self.extendee_spec.installed_upstream:
- # If this extends an upstream package, it cannot be activated for
- # it. This bypasses construction of the extension map, which can
- # fail when run in the context of a downstream Spack instance
- return False
- extensions_layout = view.extensions_layout
- exts = extensions_layout.extension_map(self.extendee_spec)
- return (self.name in exts) and (exts[self.name] == self.spec)
-
- def provides(self, vpkg_name):
- """
- True if this package provides a virtual package with the specified name
- """
- return any(
- any(self.spec.satisfies(c) for c in constraints)
- for s, constraints in self.provided.items() if s.name == vpkg_name
- )
-
- @property
- def virtuals_provided(self):
- """
- virtual packages provided by this package with its spec
- """
- return [vspec for vspec, constraints in self.provided.items()
- if any(self.spec.satisfies(c) for c in constraints)]
-
- @property
- def prefix(self):
- """Get the prefix into which this package should be installed."""
- return self.spec.prefix
-
- @property # type: ignore[misc]
- @memoized
- def compiler(self):
- """Get the spack.compiler.Compiler object used to build this package"""
- if not self.spec.concrete:
- raise ValueError("Can only get a compiler for a concrete package.")
-
- return spack.compilers.compiler_for_spec(self.spec.compiler,
- self.spec.architecture)
-
- def url_version(self, version):
- """
- Given a version, this returns a string that should be substituted
- into the package's URL to download that version.
-
- By default, this just returns the version string. Subclasses may need
- to override this, e.g. for boost versions where you need to ensure that
- there are _'s in the download URL.
- """
- return str(version)
-
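- # Illustrative sketch (not part of this diff): a package whose download
- # URLs use underscores in the version can override this hook, e.g.:
- #
- #     class Example(Package):
- #         def url_version(self, version):
- #             return version.underscored
-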
- def remove_prefix(self):
- """
- Removes the prefix for a package along with any empty parent
- directories
- """
- spack.store.layout.remove_install_directory(self.spec)
-
- @property
- def download_instr(self):
- """
- Defines the default manual download instructions. Packages can
- override the property to provide more information.
-
- Returns:
- (str): default manual download instructions
- """
- required = ('Manual download is required for {0}. '
- .format(self.spec.name) if self.manual_download else '')
- return ('{0}Refer to {1} for download instructions.'
- .format(required, self.spec.package.homepage))
-
- def do_fetch(self, mirror_only=False):
- """
- Creates a stage directory and downloads the tarball for this package.
- Working directory will be set to the stage directory.
- """
- if not self.has_code or self.spec.external:
- tty.debug('No fetch required for {0}'.format(self.name))
- return
-
- checksum = spack.config.get('config:checksum')
- fetch = self.stage.managed_by_spack
- if checksum and fetch and (self.version not in self.versions) \
- and (not self.version.is_commit):
- tty.warn("There is no checksum on file to fetch %s safely." %
- self.spec.cformat('{name}{@version}'))
-
- # Ask the user whether to skip the checksum if we're
- # interactive, but just fail if non-interactive.
- ck_msg = "Add a checksum or use --no-checksum to skip this check."
- ignore_checksum = False
- if sys.stdout.isatty():
- ignore_checksum = tty.get_yes_or_no(" Fetch anyway?",
- default=False)
- if ignore_checksum:
- tty.debug('Fetching with no checksum. {0}'
- .format(ck_msg))
-
- if not ignore_checksum:
- raise FetchError("Will not fetch %s" %
- self.spec.format('{name}{@version}'), ck_msg)
-
- deprecated = spack.config.get('config:deprecated')
- if not deprecated and self.versions.get(
- self.version, {}).get('deprecated', False):
- tty.warn("{0} is deprecated and may be removed in a future Spack "
- "release.".format(
- self.spec.format('{name}{@version}')))
-
- # Ask the user whether to install deprecated version if we're
- # interactive, but just fail if non-interactive.
- dp_msg = ("If you are willing to be a maintainer for this version "
- "of the package, submit a PR to remove `deprecated=False"
- "`, or use `--deprecated` to skip this check.")
- ignore_deprecation = False
- if sys.stdout.isatty():
- ignore_deprecation = tty.get_yes_or_no(" Fetch anyway?",
- default=False)
-
- if ignore_deprecation:
- tty.debug("Fetching deprecated version. {0}".format(
- dp_msg))
-
- if not ignore_deprecation:
- raise FetchError("Will not fetch {0}".format(
- self.spec.format('{name}{@version}')), dp_msg)
-
- self.stage.create()
- err_msg = None if not self.manual_download else self.download_instr
- start_time = time.time()
- self.stage.fetch(mirror_only, err_msg=err_msg)
- self._fetch_time = time.time() - start_time
-
- if checksum and self.version in self.versions:
- self.stage.check()
-
- self.stage.cache_local()
-
- for patch in self.spec.patches:
- patch.fetch()
- if patch.stage:
- patch.stage.cache_local()
-
- def do_stage(self, mirror_only=False):
- """Unpacks and expands the fetched tarball."""
- # Always create the stage directory at this point. Why? A no-code
- # package may want to use the installation process to install metadata.
- self.stage.create()
-
- # Fetch/expand any associated code.
- if self.has_code:
- self.do_fetch(mirror_only)
- self.stage.expand_archive()
-
- if not os.listdir(self.stage.path):
- raise FetchError("Archive was empty for %s" % self.name)
- else:
- # Support for post-install hooks requires a stage.source_path
- fsys.mkdirp(self.stage.source_path)
-
- def do_patch(self):
- """Applies patches if they haven't been applied already."""
- if not self.spec.concrete:
- raise ValueError("Can only patch concrete packages.")
-
- # Kick off the stage first. This creates the stage.
- self.do_stage()
-
- # Package can add its own patch function.
- has_patch_fun = hasattr(self, 'patch') and callable(self.patch)
-
- # Get the patches from the spec (this is a shortcut for the MV-variant)
- patches = self.spec.patches
-
- # If there are no patches, note it.
- if not patches and not has_patch_fun:
- tty.msg('No patches needed for {0}'.format(self.name))
- return
-
- # Construct paths to special files in the archive dir used to
- # keep track of whether patches were successfully applied.
- archive_dir = self.stage.source_path
- good_file = os.path.join(archive_dir, '.spack_patched')
- no_patches_file = os.path.join(archive_dir, '.spack_no_patches')
- bad_file = os.path.join(archive_dir, '.spack_patch_failed')
-
- # If we encounter an archive that failed to patch, restage it
- # so that we can apply all the patches again.
- if os.path.isfile(bad_file):
- tty.debug('Patching failed last time. Restaging.')
- self.stage.restage()
-
- # If this file exists, then we already applied all the patches.
- if os.path.isfile(good_file):
- tty.msg('Already patched {0}'.format(self.name))
- return
- elif os.path.isfile(no_patches_file):
- tty.msg('No patches needed for {0}'.format(self.name))
- return
-
- # Apply all the patches for specs that match this one
- patched = False
- for patch in patches:
- try:
- with fsys.working_dir(self.stage.source_path):
- patch.apply(self.stage)
- tty.msg('Applied patch {0}'.format(patch.path_or_url))
- patched = True
- except spack.error.SpackError as e:
- tty.debug(e)
-
- # Touch bad file if anything goes wrong.
- tty.msg('Patch %s failed.' % patch.path_or_url)
- fsys.touch(bad_file)
- raise
-
- if has_patch_fun:
- try:
- with fsys.working_dir(self.stage.source_path):
- self.patch()
- tty.msg('Ran patch() for {0}'.format(self.name))
- patched = True
- except spack.multimethod.NoSuchMethodError:
- # We are running a multimethod without a default case.
- # If there's no default it means we don't need to patch.
- if not patched:
- # if we didn't apply a patch from a patch()
- # directive, AND the patch function didn't apply, say
- # no patches are needed. Otherwise, we already
- # printed a message for each patch.
- tty.msg('No patches needed for {0}'.format(self.name))
- except spack.error.SpackError as e:
- tty.debug(e)
-
- # Touch bad file if anything goes wrong.
- tty.msg('patch() function failed for {0}'.format(self.name))
- fsys.touch(bad_file)
- raise
-
- # Get rid of any old failed file -- patches have either succeeded
- # or are not needed. This is mostly defensive -- it's needed
- # if the restage() method doesn't clean *everything* (e.g., for a repo)
- if os.path.isfile(bad_file):
- os.remove(bad_file)
-
- # touch good or no patches file so that we skip next time.
- if patched:
- fsys.touch(good_file)
- else:
- fsys.touch(no_patches_file)
-
- @classmethod
- def all_patches(cls):
- """Retrieve all patches associated with the package.
-
- Retrieves patches on the package itself as well as patches on the
- dependencies of the package."""
- patches = []
- for _, patch_list in cls.patches.items():
- for patch in patch_list:
- patches.append(patch)
-
- pkg_deps = cls.dependencies
- for dep_name in pkg_deps:
- for _, dependency in pkg_deps[dep_name].items():
- for _, patch_list in dependency.patches.items():
- for patch in patch_list:
- patches.append(patch)
-
- return patches
-
- def content_hash(self, content=None):
- """Create a hash based on the artifacts and patches used to build this package.
-
- This includes:
- * source artifacts (tarballs, repositories) used to build;
- * content hashes (``sha256``'s) of all patches applied by Spack; and
- * canonicalized contents of the ``package.py`` recipe used to build.
-
- This hash is only included in Spack's DAG hash for concrete specs, but if it
- happens to be called on a package with an abstract spec, only applicable (i.e.,
- determinable) portions of the hash will be included.
-
- """
- # list of components to make up the hash
- hash_content = []
-
- # source artifacts/repositories
- # TODO: resources
- if self.spec.versions.concrete:
- try:
- source_id = fs.for_package_version(self, self.version).source_id()
- except (fs.ExtrapolationError, fs.InvalidArgsError):
- # ExtrapolationError happens if the package has no fetchers defined.
- # InvalidArgsError happens when there are version directives with args,
- # but none of them identifies an actual fetcher.
- source_id = None
-
- if not source_id:
- # TODO? in cases where a digest or source_id isn't available,
- # should this attempt to download the source and set one? This
- # probably only happens for source repositories which are
- # referenced by branch name rather than tag or commit ID.
- env = spack.environment.active_environment()
- from_local_sources = env and env.is_develop(self.spec)
- if not self.spec.external and not from_local_sources:
- message = 'Missing a source id for {s.name}@{s.version}'
- tty.warn(message.format(s=self))
- hash_content.append(''.encode('utf-8'))
- else:
- hash_content.append(source_id.encode('utf-8'))
-
- # patch sha256's
- # Only include these if they've been assigned by the concretizer.
- # We check spec._patches_assigned instead of spec.concrete because
- # we have to call package_hash *before* marking specs concrete
- if self.spec._patches_assigned():
- hash_content.extend(
- ':'.join((p.sha256, str(p.level))).encode('utf-8')
- for p in self.spec.patches
- )
-
- # package.py contents
- hash_content.append(package_hash(self.spec, source=content).encode('utf-8'))
-
- # put it all together and encode as base32
- b32_hash = base64.b32encode(
- hashlib.sha256(
- bytes().join(sorted(hash_content))
- ).digest()
- ).lower()
-
- # convert from bytes if running python 3
- if sys.version_info[0] >= 3:
- b32_hash = b32_hash.decode('utf-8')
-
- return b32_hash
-
- def _has_make_target(self, target):
- """Checks to see if 'target' is a valid target in a Makefile.
-
- Parameters:
- target (str): the target to check for
-
- Returns:
- bool: True if 'target' is found, else False
- """
- # Prevent altering LC_ALL for 'make' outside this function
- make = copy.deepcopy(inspect.getmodule(self).make)
-
- # Use English locale for missing target message comparison
- make.add_default_env('LC_ALL', 'C')
-
- # Check if we have a Makefile
- for makefile in ['GNUmakefile', 'Makefile', 'makefile']:
- if os.path.exists(makefile):
- break
- else:
- tty.debug('No Makefile found in the build directory')
- return False
-
- # Check if 'target' is a valid target.
- #
- # `make -n target` performs a "dry run". It prints the commands that
- # would be run but doesn't actually run them. If the target does not
- # exist, you will see one of the following error messages:
- #
- # GNU Make:
- # make: *** No rule to make target `test'. Stop.
- # *** No rule to make target 'test'. Stop.
- #
- # BSD Make:
- # make: don't know how to make test. Stop
- missing_target_msgs = [
- "No rule to make target `{0}'. Stop.",
- "No rule to make target '{0}'. Stop.",
- "don't know how to make {0}. Stop",
- ]
-
- kwargs = {
- 'fail_on_error': False,
- 'output': os.devnull,
- 'error': str,
- }
-
- stderr = make('-n', target, **kwargs)
-
- for missing_target_msg in missing_target_msgs:
- if missing_target_msg.format(target) in stderr:
- tty.debug("Target '{0}' not found in {1}"
- .format(target, makefile))
- return False
-
- return True
-
- def _if_make_target_execute(self, target, *args, **kwargs):
- """Runs ``make target`` if 'target' is a valid target in the Makefile.
-
- Parameters:
- target (str): the target to potentially execute
- """
- if self._has_make_target(target):
- # Execute target
- inspect.getmodule(self).make(target, *args, **kwargs)
-
- def _has_ninja_target(self, target):
- """Checks to see if 'target' is a valid target in a Ninja build script.
-
- Parameters:
- target (str): the target to check for
-
- Returns:
- bool: True if 'target' is found, else False
- """
- ninja = inspect.getmodule(self).ninja
-
- # Check if we have a Ninja build script
- if not os.path.exists('build.ninja'):
- tty.debug('No Ninja build script found in the build directory')
- return False
-
- # Get a list of all targets in the Ninja build script
- # https://ninja-build.org/manual.html#_extra_tools
- all_targets = ninja('-t', 'targets', 'all', output=str).split('\n')
-
- # Check if 'target' is a valid target
- matches = [line for line in all_targets
- if line.startswith(target + ':')]
-
- if not matches:
- tty.debug("Target '{0}' not found in build.ninja"
- .format(target))
- return False
-
- return True
-
- def _if_ninja_target_execute(self, target, *args, **kwargs):
- """Runs ``ninja target`` if 'target' is a valid target in the Ninja
- build script.
-
- Parameters:
- target (str): the target to potentially execute
- """
- if self._has_ninja_target(target):
- # Execute target
- inspect.getmodule(self).ninja(target, *args, **kwargs)
-
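- # Illustrative sketch (not part of this diff): these helpers are
- # typically invoked from build-time test callbacks, e.g.:
- #
- #     build_time_test_callbacks = ['check']
- #
- #     def check(self):
- #         self._if_make_target_execute('test')
- #         self._if_ninja_target_execute('test')
-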
- def _get_needed_resources(self):
- resources = []
- # Select the resources that are needed for this build
- if self.spec.concrete:
- for when_spec, resource_list in self.resources.items():
- if when_spec in self.spec:
- resources.extend(resource_list)
- else:
- for when_spec, resource_list in self.resources.items():
- # Note that variant checking is always strict for specs where
- # the name is not specified. But with strict variant checking,
- # only variants mentioned in 'other' are checked. Here we only
- # want to make sure that no constraints in when_spec
- # conflict with the spec, so we need to invoke
- # when_spec.satisfies(self.spec) vs.
- # self.spec.satisfies(when_spec)
- if when_spec.satisfies(self.spec, strict=False):
- resources.extend(resource_list)
- # Sorts the resources by the length of the string representing their
- # destination. Since any nested resource must contain another
- # resource's name in its path, it seems that should work
- resources = sorted(resources, key=lambda res: len(res.destination))
- return resources
-
- def _resource_stage(self, resource):
- pieces = ['resource', resource.name, self.spec.dag_hash()]
- resource_stage_folder = '-'.join(pieces)
- return resource_stage_folder
-
- @contextlib.contextmanager
- def _stage_and_write_lock(self):
- """Prefix lock nested in a stage."""
- with self.stage:
- with spack.store.db.prefix_write_lock(self.spec):
- yield
-
- def do_install(self, **kwargs):
- """Called by commands to install a package and or its dependencies.
-
- Package implementations should override install() to describe
- their build process.
-
- Args:
- cache_only (bool): Fail if binary package unavailable.
- dirty (bool): Don't clean the build environment before installing.
- explicit (bool): True if package was explicitly installed, False
- if package was implicitly installed (as a dependency).
- fail_fast (bool): Fail if any dependency fails to install;
- otherwise, the default is to install as many dependencies as
- possible (i.e., best effort installation).
- fake (bool): Don't really build; install fake stub files instead.
- force (bool): Install again, even if already installed.
- install_deps (bool): Install dependencies before installing this
- package
- install_source (bool): By default, source is not installed, but
- for debugging it might be useful to keep it around.
- keep_prefix (bool): Keep install prefix on failure. By default,
- destroys it.
- keep_stage (bool): By default, stage is destroyed only if there
- are no exceptions during build. Set to True to keep the stage
- even with exceptions.
- restage (bool): Force spack to restage the package source.
- skip_patch (bool): Skip patch stage of build if True.
- stop_before (InstallPhase): stop execution before this
- installation phase (or None)
- stop_at (InstallPhase): last installation phase to be executed
- (or None)
- tests (bool or list or set): False to run no tests, True to test
- all packages, or a list of package names to run tests for some
- use_cache (bool): Install from binary package, if available.
- verbose (bool): Display verbose build output (by default,
- suppresses it)
- """
- # Non-transitive dev specs need to keep the dev stage and be built from
- # source every time. Transitive ones just need to be built from source.
- dev_path_var = self.spec.variants.get('dev_path', None)
- if dev_path_var:
- kwargs['keep_stage'] = True
-
- builder = PackageInstaller([(self, kwargs)])
- builder.install()
-
- def cache_extra_test_sources(self, srcs):
- """Copy relative source paths to the corresponding install test subdir
-
- This method is intended as an optional install test setup helper for
- grabbing source files/directories during the installation process and
- copying them to the installation test subdirectory for subsequent use
- during install testing.
-
- Args:
- srcs (str or list): relative paths for files and/or
- subdirectories located in the staged source path that are to
- be copied to the corresponding location(s) under the install
- testing directory.
- """
- paths = [srcs] if isinstance(srcs, six.string_types) else srcs
-
- for path in paths:
- src_path = os.path.join(self.stage.source_path, path)
- dest_path = os.path.join(self.install_test_root, path)
- if os.path.isdir(src_path):
- fsys.install_tree(src_path, dest_path)
- else:
- fsys.mkdirp(os.path.dirname(dest_path))
- fsys.copy(src_path, dest_path)
-
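- # Illustrative sketch (not part of this diff): packages typically call
- # this from a post-install hook so the files survive for later
- # `spack test` runs; the directory name is an assumption.
- #
- #     @run_after('install')
- #     def cache_test_sources(self):
- #         self.cache_extra_test_sources(['examples'])
-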
- @contextlib.contextmanager
- def _setup_test(self, verbose, externals):
- self.test_failures = []
- if self.test_suite:
- self.test_log_file = self.test_suite.log_file_for_spec(self.spec)
- self.tested_file = self.test_suite.tested_file_for_spec(self.spec)
- pkg_id = self.test_suite.test_pkg_id(self.spec)
- else:
- self.test_log_file = fsys.join_path(
- self.stage.path, _spack_install_test_log)
- pkg_id = self.spec.format('{name}-{version}-{hash:7}')
- fsys.touch(self.test_log_file) # Otherwise log_parse complains
-
- with tty.log.log_output(self.test_log_file, verbose) as logger:
- with logger.force_echo():
- tty.msg('Testing package {0}'.format(pkg_id))
-
- # use debug print levels for log file to record commands
- old_debug = tty.is_debug()
- tty.set_debug(True)
-
- try:
- yield logger
- finally:
- # reset debug level
- tty.set_debug(old_debug)
-
- def do_test(self, dirty=False, externals=False):
- if self.test_requires_compiler:
- compilers = spack.compilers.compilers_for_spec(
- self.spec.compiler, arch_spec=self.spec.architecture)
- if not compilers:
- tty.error('Skipping tests for package %s\n' %
- self.spec.format('{name}-{version}-{hash:7}') +
- 'Package test requires missing compiler %s' %
- self.spec.compiler)
- return
-
- kwargs = {
- 'dirty': dirty, 'fake': False, 'context': 'test',
- 'externals': externals
- }
- if tty.is_verbose():
- kwargs['verbose'] = True
- spack.build_environment.start_build_process(
- self, test_process, kwargs)
-
- def test(self):
- # Defer tests to virtual and concrete packages
- pass
-
- def run_test(self, exe, options=[], expected=[], status=0,
- installed=False, purpose='', skip_missing=False,
- work_dir=None):
- """Run the test and confirm the expected results are obtained
-
- Log any failures and continue, they will be re-raised later
-
- Args:
- exe (str): the name of the executable
- options (str or list): list of options to pass to the runner
- expected (str or list): list of expected output strings.
- Each string is a regex expected to match part of the output.
- status (int or list): possible passing status values
- with 0 meaning the test is expected to succeed
- installed (bool): if ``True``, the executable must be in the
- install prefix
- purpose (str): message to display before running test
- skip_missing (bool): skip the test if the executable is not
- in the install prefix bin directory or the provided work_dir
- work_dir (str or None): path to the smoke test directory
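-
-        Example (a hypothetical usage sketch; the executable, options, and
-        expected output are made up)::
-
-            self.run_test('hello', options=['--world'],
-                          expected=[r'Hello,\s+world'], status=0,
-                          purpose='test: run the hello example')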
- """
- wdir = '.' if work_dir is None else work_dir
- with fsys.working_dir(wdir, create=True):
- try:
- runner = which(exe)
- if runner is None and skip_missing:
- return
- assert runner is not None, \
- "Failed to find executable '{0}'".format(exe)
-
- self._run_test_helper(
- runner, options, expected, status, installed, purpose)
- print("PASSED")
- return True
- except BaseException as e:
- # print a summary of the error to the log file
- # so that cdash and junit reporters know about it
- exc_type, _, tb = sys.exc_info()
- print('FAILED: {0}'.format(e))
- import traceback
-
- # remove the current call frame to exclude the extract_stack
- # call from the error
- stack = traceback.extract_stack()[:-1]
-
-                # Package files have a line added at import time, so we re-read
-                # the file to make line numbers match. We subtract two from the
-                # reported line number: the import statement inflates it by
-                # one, and converting to a zero-based index into the re-read
-                # file accounts for the other.
- for i, entry in enumerate(stack):
- filename, lineno, function, text = entry
- if spack.repo.is_package_file(filename):
- with open(filename, 'r') as f:
- lines = f.readlines()
- new_lineno = lineno - 2
- text = lines[new_lineno]
- stack[i] = (filename, new_lineno, function, text)
-
- # Format the stack to print and print it
- out = traceback.format_list(stack)
- for line in out:
- print(line.rstrip('\n'))
-
- if exc_type is spack.util.executable.ProcessError:
- out = six.StringIO()
- spack.build_environment.write_log_summary(
- out, 'test', self.test_log_file, last=1)
- m = out.getvalue()
- else:
- # We're below the package context, so get context from
- # stack instead of from traceback.
- # The traceback is truncated here, so we can't use it to
- # traverse the stack.
- m = '\n'.join(
- spack.build_environment.get_package_context(tb)
- )
-
- exc = e # e is deleted after this block
-
- # If we fail fast, raise another error
- if spack.config.get('config:fail_fast', False):
- raise TestFailure([(exc, m)])
- else:
- self.test_failures.append((exc, m))
- return False
-
- def _run_test_helper(self, runner, options, expected, status, installed,
- purpose):
- status = [status] if isinstance(status, six.integer_types) else status
- expected = [expected] if isinstance(expected, six.string_types) else \
- expected
- options = [options] if isinstance(options, six.string_types) else \
- options
-
- if purpose:
- tty.msg(purpose)
- else:
- tty.debug('test: {0}: expect command status in {1}'
- .format(runner.name, status))
-
- if installed:
- msg = "Executable '{0}' expected in prefix".format(runner.name)
- msg += ", found in {0} instead".format(runner.path)
- assert runner.path.startswith(self.spec.prefix), msg
-
- try:
- output = runner(*options, output=str.split, error=str.split)
-
- assert 0 in status, \
- 'Expected {0} execution to fail'.format(runner.name)
- except ProcessError as err:
- output = str(err)
- match = re.search(r'exited with status ([0-9]+)', output)
- if not (match and int(match.group(1)) in status):
- raise
-
- for check in expected:
- cmd = ' '.join([runner.name] + options)
- msg = "Expected '{0}' to match output of `{1}`".format(check, cmd)
- msg += '\n\nOutput: {0}'.format(output)
- assert re.search(check, output), msg
-
- def unit_test_check(self):
- """Hook for unit tests to assert things about package internals.
-
- Unit tests can override this function to perform checks after
- ``Package.install`` and all post-install hooks run, but before
- the database is updated.
-
- The overridden function may indicate that the install procedure
- should terminate early (before updating the database) by
- returning ``False`` (or any value such that ``bool(result)`` is
- ``False``).
-
- Return:
- (bool): ``True`` to continue, ``False`` to skip ``install()``
- """
- return True
-
- def sanity_check_prefix(self):
- """This function checks whether install succeeded."""
-
- def check_paths(path_list, filetype, predicate):
- if isinstance(path_list, six.string_types):
- path_list = [path_list]
-
- for path in path_list:
- abs_path = os.path.join(self.prefix, path)
- if not predicate(abs_path):
- raise InstallError(
- "Install failed for %s. No such %s in prefix: %s" %
- (self.name, filetype, path))
-
- check_paths(self.sanity_check_is_file, 'file', os.path.isfile)
- check_paths(self.sanity_check_is_dir, 'directory', os.path.isdir)
-
- installed = set(os.listdir(self.prefix))
- installed.difference_update(
- spack.store.layout.hidden_file_regexes)
- if not installed:
- raise InstallError(
- "Install failed for %s. Nothing was installed!" % self.name)
-
- def apply_macos_rpath_fixups(self):
- """On Darwin, make installed libraries more easily relocatable.
-
- Some build systems (handrolled, autotools, makefiles) can set their own
- rpaths that are duplicated by spack's compiler wrapper. This fixup
- interrogates, and postprocesses if necessary, all libraries installed
- by the code.
-
- It should be added as a @run_after to packaging systems (or individual
- packages) that do not install relocatable libraries by default.
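-
-        For example, as done for the generic ``Package`` class below::
-
-            run_after('install')(PackageBase.apply_macos_rpath_fixups)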
- """
- if 'platform=darwin' not in self.spec:
- return
-
- from spack.relocate import fixup_macos_rpaths
- fixup_macos_rpaths(self.spec)
-
- @property
- def build_log_path(self):
- """
-        Return the expected (or current) build log file path. The path points
-        to the staging build file until the software is successfully installed,
-        after which it points to the file in the installation directory.
- """
- return self.install_log_path if self.spec.installed else self.log_path
-
- @classmethod
- def inject_flags(cls, name, flags):
- """
- flag_handler that injects all flags through the compiler wrapper.
- """
- return flags, None, None
-
- @classmethod
- def env_flags(cls, name, flags):
- """
- flag_handler that adds all flags to canonical environment variables.
- """
- return None, flags, None
-
- @classmethod
- def build_system_flags(cls, name, flags):
- """
- flag_handler that passes flags to the build system arguments. Any
- package using `build_system_flags` must also implement
- `flags_to_build_system_args`, or derive from a class that
- implements it. Currently, AutotoolsPackage and CMakePackage
- implement it.
- """
- return None, None, flags
-
- def setup_build_environment(self, env):
- """Sets up the build environment for a package.
-
- This method will be called before the current package prefix exists in
- Spack's store.
-
- Args:
- env (spack.util.environment.EnvironmentModifications): environment
- modifications to be applied when the package is built. Package authors
- can call methods on it to alter the build environment.
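-
-        Example (an illustrative sketch; the variable name is made up)::
-
-            def setup_build_environment(self, env):
-                env.set('MY_PACKAGE_ROOT', self.prefix)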
- """
- pass
-
- def setup_run_environment(self, env):
- """Sets up the run environment for a package.
-
- Args:
- env (spack.util.environment.EnvironmentModifications): environment
- modifications to be applied when the package is run. Package authors
- can call methods on it to alter the run environment.
- """
- pass
-
- def setup_dependent_build_environment(self, env, dependent_spec):
- """Sets up the build environment of packages that depend on this one.
-
- This is similar to ``setup_build_environment``, but it is used to
- modify the build environments of packages that *depend* on this one.
-
- This gives packages like Python and others that follow the extension
- model a way to implement common environment or compile-time settings
- for dependencies.
-
- This method will be called before the dependent package prefix exists
- in Spack's store.
-
- Examples:
- 1. Installing python modules generally requires ``PYTHONPATH``
- to point to the ``lib/pythonX.Y/site-packages`` directory in the
- module's install prefix. This method could be used to set that
- variable.
-
- Args:
- env (spack.util.environment.EnvironmentModifications): environment
- modifications to be applied when the dependent package is built.
- Package authors can call methods on it to alter the build environment.
-
- dependent_spec (spack.spec.Spec): the spec of the dependent package
- about to be built. This allows the extendee (self) to query
- the dependent's state. Note that *this* package's spec is
- available as ``self.spec``
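-
-        Example (a hypothetical sketch for a Python-like extendee)::
-
-            def setup_dependent_build_environment(self, env, dependent_spec):
-                env.prepend_path('PYTHONPATH', self.prefix.lib)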
- """
- pass
-
- def setup_dependent_run_environment(self, env, dependent_spec):
- """Sets up the run environment of packages that depend on this one.
-
- This is similar to ``setup_run_environment``, but it is used to
- modify the run environments of packages that *depend* on this one.
-
- This gives packages like Python and others that follow the extension
- model a way to implement common environment or run-time settings
- for dependencies.
-
- Args:
- env (spack.util.environment.EnvironmentModifications): environment
- modifications to be applied when the dependent package is run.
- Package authors can call methods on it to alter the build environment.
-
- dependent_spec (spack.spec.Spec): The spec of the dependent package
- about to be run. This allows the extendee (self) to query
- the dependent's state. Note that *this* package's spec is
- available as ``self.spec``
- """
- pass
-
- def setup_dependent_package(self, module, dependent_spec):
- """Set up Python module-scope variables for dependent packages.
-
- Called before the install() method of dependents.
-
- Default implementation does nothing, but this can be
- overridden by an extendable package to set up the module of
- its extensions. This is useful if there are some common steps
- to installing all extensions for a certain package.
-
- Examples:
-
- 1. Extensions often need to invoke the ``python`` interpreter
- from the Python installation being extended. This routine
- can put a ``python()`` Executable object in the module scope
- for the extension package to simplify extension installs.
-
- 2. MPI compilers could set some variables in the dependent's
- scope that point to ``mpicc``, ``mpicxx``, etc., allowing
- them to be called by common name regardless of which MPI is used.
-
- 3. BLAS/LAPACK implementations can set some variables
- indicating the path to their libraries, since these
- paths differ by BLAS/LAPACK implementation.
-
- Args:
- module (spack.package.PackageBase.module): The Python ``module``
- object of the dependent package. Packages can use this to set
- module-scope variables for the dependent to use.
-
- dependent_spec (spack.spec.Spec): The spec of the dependent package
- about to be built. This allows the extendee (self) to
- query the dependent's state. Note that *this*
- package's spec is available as ``self.spec``.
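-
-        Example (a hypothetical sketch for an extendable interpreter; the
-        names are illustrative only)::
-
-            def setup_dependent_package(self, module, dependent_spec):
-                module.python = Executable(self.prefix.bin.python)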
- """
- pass
-
- flag_handler = inject_flags
- # The flag handler method is called for each of the allowed compiler flags.
- # It returns a triple of inject_flags, env_flags, build_system_flags.
- # The flags returned as inject_flags are injected through the spack
- # compiler wrappers.
- # The flags returned as env_flags are passed to the build system through
- # the environment variables of the same name.
- # The flags returned as build_system_flags are passed to the build system
- # package subclass to be turned into the appropriate part of the standard
- # arguments. This is implemented for build system classes where
- # appropriate and will otherwise raise a NotImplementedError.
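-    #
-    # Example (hypothetical): a package could route Fortran flags to the
-    # build system and inject everything else through the wrappers:
-    #
-    #     def flag_handler(self, name, flags):
-    #         if name == 'fflags':
-    #             return (None, None, flags)
-    #         return (flags, None, None)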
-
- def flags_to_build_system_args(self, flags):
- # Takes flags as a dict name: list of values
- if any(v for v in flags.values()):
- msg = 'The {0} build system'.format(self.__class__.__name__)
- msg += ' cannot take command line arguments for compiler flags'
- raise NotImplementedError(msg)
-
- @staticmethod
- def uninstall_by_spec(spec, force=False, deprecator=None):
- if not os.path.isdir(spec.prefix):
- # prefix may not exist, but DB may be inconsistent. Try to fix by
- # removing, but omit hooks.
- specs = spack.store.db.query(spec, installed=True)
- if specs:
- if deprecator:
- spack.store.db.deprecate(specs[0], deprecator)
- tty.debug('Deprecating stale DB entry for {0}'
- .format(spec.short_spec))
- else:
- spack.store.db.remove(specs[0])
- tty.debug('Removed stale DB entry for {0}'
- .format(spec.short_spec))
- return
- else:
- raise InstallError(str(spec) + " is not installed.")
-
- if not force:
- dependents = spack.store.db.installed_relatives(
- spec,
- direction='parents',
- transitive=True,
- deptype=("link", "run"),
- )
- if dependents:
- raise PackageStillNeededError(spec, dependents)
-
- # Try to get the package for the spec
- try:
- pkg = spec.package
- except spack.repo.UnknownEntityError:
- pkg = None
-
- # Pre-uninstall hook runs first.
- with spack.store.db.prefix_write_lock(spec):
-
- if pkg is not None:
- try:
- spack.hooks.pre_uninstall(spec)
- except Exception as error:
- if force:
- error_msg = (
- "One or more pre_uninstall hooks have failed"
- " for {0}, but Spack is continuing with the"
- " uninstall".format(str(spec)))
- if isinstance(error, spack.error.SpackError):
- error_msg += (
- "\n\nError message: {0}".format(str(error)))
- tty.warn(error_msg)
- # Note that if the uninstall succeeds then we won't be
- # seeing this error again and won't have another chance
- # to run the hook.
- else:
- raise
-
- # Uninstalling in Spack only requires removing the prefix.
- if not spec.external:
- msg = 'Deleting package prefix [{0}]'
- tty.debug(msg.format(spec.short_spec))
- # test if spec is already deprecated, not whether we want to
- # deprecate it now
- deprecated = bool(spack.store.db.deprecator(spec))
- spack.store.layout.remove_install_directory(spec, deprecated)
- # Delete DB entry
- if deprecator:
- msg = 'deprecating DB entry [{0}] in favor of [{1}]'
- tty.debug(msg.format(spec.short_spec, deprecator.short_spec))
- spack.store.db.deprecate(spec, deprecator)
- else:
- msg = 'Deleting DB entry [{0}]'
- tty.debug(msg.format(spec.short_spec))
- spack.store.db.remove(spec)
-
- if pkg is not None:
- try:
- spack.hooks.post_uninstall(spec)
- except Exception:
- # If there is a failure here, this is our only chance to do
- # something about it: at this point the Spec has been removed
- # from the DB and prefix, so the post-uninstallation hooks
- # will not have another chance to run.
- error_msg = (
- "One or more post-uninstallation hooks failed for"
- " {0}, but the prefix has been removed (if it is not"
- " external).".format(str(spec)))
- tb_msg = traceback.format_exc()
- error_msg += "\n\nThe error:\n\n{0}".format(tb_msg)
- tty.warn(error_msg)
-
- tty.msg('Successfully uninstalled {0}'.format(spec.short_spec))
-
- def do_uninstall(self, force=False):
- """Uninstall this package by spec."""
- # delegate to instance-less method.
- Package.uninstall_by_spec(self.spec, force)
-
- def do_deprecate(self, deprecator, link_fn):
- """Deprecate this package in favor of deprecator spec"""
- spec = self.spec
-
- # Check whether package to deprecate has active extensions
- if self.extendable:
- view = spack.filesystem_view.YamlFilesystemView(spec.prefix,
- spack.store.layout)
- active_exts = view.extensions_layout.extension_map(spec).values()
- if active_exts:
- short = spec.format('{name}/{hash:7}')
- m = "Spec %s has active extensions\n" % short
- for active in active_exts:
- m += ' %s\n' % active.format('{name}/{hash:7}')
- m += "Deactivate extensions before deprecating %s" % short
- tty.die(m)
-
- # Check whether package to deprecate is an active extension
- if self.is_extension:
- extendee = self.extendee_spec
- view = spack.filesystem_view.YamlFilesystemView(extendee.prefix,
- spack.store.layout)
-
- if self.is_activated(view):
- short = spec.format('{name}/{hash:7}')
- short_ext = extendee.format('{name}/{hash:7}')
- msg = "Spec %s is an active extension of %s\n" % (short,
- short_ext)
- msg += "Deactivate %s to be able to deprecate it" % short
- tty.die(msg)
-
- # Install deprecator if it isn't installed already
- if not spack.store.db.query(deprecator):
- deprecator.package.do_install()
-
- old_deprecator = spack.store.db.deprecator(spec)
- if old_deprecator:
-            # Find this spec's yaml file from its old deprecation
- self_yaml = spack.store.layout.deprecated_file_path(spec,
- old_deprecator)
- else:
- self_yaml = spack.store.layout.spec_file_path(spec)
-
- # copy spec metadata to "deprecated" dir of deprecator
- depr_yaml = spack.store.layout.deprecated_file_path(spec,
- deprecator)
- fsys.mkdirp(os.path.dirname(depr_yaml))
- shutil.copy2(self_yaml, depr_yaml)
-
- # Any specs deprecated in favor of this spec are re-deprecated in
- # favor of its new deprecator
- for deprecated in spack.store.db.specs_deprecated_by(spec):
- deprecated.package.do_deprecate(deprecator, link_fn)
-
- # Now that we've handled metadata, uninstall and replace with link
- Package.uninstall_by_spec(spec, force=True, deprecator=deprecator)
- link_fn(deprecator.prefix, spec.prefix)
-
- def _check_extendable(self):
- if not self.extendable:
- raise ValueError("Package %s is not extendable!" % self.name)
-
- def _sanity_check_extension(self):
- if not self.is_extension:
- raise ActivationError("This package is not an extension.")
-
- extendee_package = self.extendee_spec.package
- extendee_package._check_extendable()
-
- if not self.extendee_spec.installed:
- raise ActivationError(
- "Can only (de)activate extensions for installed packages.")
- if not self.spec.installed:
- raise ActivationError("Extensions must first be installed.")
-        if self.extendee_spec.name not in self.extendees:
-            raise ActivationError("%s does not extend %s!" %
-                                  (self.name, self.extendee_spec.name))
-
- def do_activate(self, view=None, with_dependencies=True, verbose=True):
- """Called on an extension to invoke the extendee's activate method.
-
- Commands should call this routine, and should not call
- activate() directly.
- """
- if verbose:
- tty.msg('Activating extension {0} for {1}'.format(
- self.spec.cshort_spec, self.extendee_spec.cshort_spec))
-
- self._sanity_check_extension()
- if not view:
- view = YamlFilesystemView(
- self.extendee_spec.prefix, spack.store.layout)
-
- extensions_layout = view.extensions_layout
-
- try:
- extensions_layout.check_extension_conflict(
- self.extendee_spec, self.spec)
- except spack.directory_layout.ExtensionAlreadyInstalledError as e:
- # already installed, let caller know
- tty.msg(e.message)
- return
-
- # Activate any package dependencies that are also extensions.
- if with_dependencies:
- for spec in self.dependency_activations():
- if not spec.package.is_activated(view):
- spec.package.do_activate(
- view, with_dependencies=with_dependencies,
- verbose=verbose)
-
- self.extendee_spec.package.activate(
- self, view, **self.extendee_args)
-
- extensions_layout.add_extension(self.extendee_spec, self.spec)
-
- if verbose:
- tty.debug('Activated extension {0} for {1}'.format(
- self.spec.cshort_spec, self.extendee_spec.cshort_spec))
-
- def dependency_activations(self):
- return (spec for spec in self.spec.traverse(root=False, deptype='run')
- if spec.package.extends(self.extendee_spec))
-
- def activate(self, extension, view, **kwargs):
- """
- Add the extension to the specified view.
-
- Package authors can override this function to maintain some
- centralized state related to the set of activated extensions
- for a package.
-
- Spack internals (commands, hooks, etc.) should call
- do_activate() method so that proper checks are always executed.
- """
- view.merge(extension.spec, ignore=kwargs.get('ignore', None))
-
- def do_deactivate(self, view=None, **kwargs):
- """Remove this extension package from the specified view. Called
- on the extension to invoke extendee's deactivate() method.
-
- `remove_dependents=True` deactivates extensions depending on this
- package instead of raising an error.
- """
- self._sanity_check_extension()
- force = kwargs.get('force', False)
- verbose = kwargs.get('verbose', True)
- remove_dependents = kwargs.get('remove_dependents', False)
-
- if verbose:
- tty.msg('Deactivating extension {0} for {1}'.format(
- self.spec.cshort_spec, self.extendee_spec.cshort_spec))
-
- if not view:
- view = YamlFilesystemView(
- self.extendee_spec.prefix, spack.store.layout)
- extensions_layout = view.extensions_layout
-
- # Allow a force deactivate to happen. This can unlink
- # spurious files if something was corrupted.
- if not force:
- extensions_layout.check_activated(
- self.extendee_spec, self.spec)
-
- activated = extensions_layout.extension_map(
- self.extendee_spec)
- for name, aspec in activated.items():
- if aspec == self.spec:
- continue
- for dep in aspec.traverse(deptype='run'):
- if self.spec == dep:
- if remove_dependents:
- aspec.package.do_deactivate(**kwargs)
- else:
- msg = ('Cannot deactivate {0} because {1} is '
- 'activated and depends on it')
- raise ActivationError(msg.format(
- self.spec.cshort_spec, aspec.cshort_spec))
-
- self.extendee_spec.package.deactivate(
- self, view, **self.extendee_args)
-
- # redundant activation check -- makes SURE the spec is not
- # still activated even if something was wrong above.
- if self.is_activated(view):
- extensions_layout.remove_extension(
- self.extendee_spec, self.spec)
-
- if verbose:
- tty.debug('Deactivated extension {0} for {1}'.format(
- self.spec.cshort_spec, self.extendee_spec.cshort_spec))
-
- def deactivate(self, extension, view, **kwargs):
- """
- Remove all extension files from the specified view.
-
- Package authors can override this method to support other
- extension mechanisms. Spack internals (commands, hooks, etc.)
- should call do_deactivate() method so that proper checks are
- always executed.
- """
- view.unmerge(extension.spec, ignore=kwargs.get('ignore', None))
-
- def view(self):
- """Create a view with the prefix of this package as the root.
- Extensions added to this view will modify the installation prefix of
- this package.
- """
- return YamlFilesystemView(self.prefix, spack.store.layout)
-
- def do_restage(self):
- """Reverts expanded/checked out source to a pristine state."""
- self.stage.restage()
-
- def do_clean(self):
- """Removes the package's build stage and source tarball."""
- for patch in self.spec.patches:
- patch.clean()
-
- self.stage.destroy()
-
- def format_doc(self, **kwargs):
- """Wrap doc string at 72 characters and format nicely"""
- indent = kwargs.get('indent', 0)
-
- if not self.__doc__:
- return ""
-
- doc = re.sub(r'\s+', ' ', self.__doc__)
- lines = textwrap.wrap(doc, 72)
- results = six.StringIO()
- for line in lines:
- results.write((" " * indent) + line + "\n")
- return results.getvalue()
-
- @property
- def all_urls(self):
- """A list of all URLs in a package.
-
- Check both class-level and version-specific URLs.
-
- Returns:
- list: a list of URLs
- """
- urls = []
- if hasattr(self, 'url') and self.url:
- urls.append(self.url)
-
- # fetch from first entry in urls to save time
- if hasattr(self, 'urls') and self.urls:
- urls.append(self.urls[0])
-
- for args in self.versions.values():
- if 'url' in args:
- urls.append(args['url'])
- return urls
-
- def fetch_remote_versions(self, concurrency=128):
- """Find remote versions of this package.
-
- Uses ``list_url`` and any other URLs listed in the package file.
-
- Returns:
- dict: a dictionary mapping versions to URLs
- """
- if not self.all_urls:
- return {}
-
- try:
- return spack.util.web.find_versions_of_archive(
- self.all_urls,
- self.list_url,
- self.list_depth,
- concurrency,
- reference_package=self,
- )
- except spack.util.web.NoNetworkConnectionError as e:
- tty.die("Package.fetch_versions couldn't connect to:", e.url,
- e.message)
-
- @property
- def rpath(self):
- """Get the rpath this package links with, as a list of paths."""
- rpaths = [self.prefix.lib, self.prefix.lib64]
- deps = self.spec.dependencies(deptype='link')
- rpaths.extend(d.prefix.lib for d in deps
- if os.path.isdir(d.prefix.lib))
- rpaths.extend(d.prefix.lib64 for d in deps
- if os.path.isdir(d.prefix.lib64))
- return rpaths
-
- @property
- def rpath_args(self):
- """
- Get the rpath args as a string, with -Wl,-rpath, for each element
- """
- return " ".join("-Wl,-rpath,%s" % p for p in self.rpath)
-
- def _run_test_callbacks(self, method_names, callback_type='install'):
- """Tries to call all of the listed methods, returning immediately
- if the list is None."""
- if method_names is None:
- return
-
- fail_fast = spack.config.get('config:fail_fast', False)
-
- with self._setup_test(verbose=False, externals=False) as logger:
- # Report running each of the methods in the build log
- print_test_message(
- logger, 'Running {0}-time tests'.format(callback_type), True)
-
- for name in method_names:
- try:
- fn = getattr(self, name)
-
-                    msg = ('RUN-TESTS: {0}-time tests [{1}]'
-                           .format(callback_type, name))
- print_test_message(logger, msg, True)
-
- fn()
- except AttributeError as e:
-                    msg = ('RUN-TESTS: method not implemented [{0}]'
-                           .format(name))
- print_test_message(logger, msg, True)
-
- self.test_failures.append((e, msg))
- if fail_fast:
- break
-
- # Raise any collected failures here
- if self.test_failures:
- raise TestFailure(self.test_failures)
-
- @on_package_attributes(run_tests=True)
- def _run_default_build_time_test_callbacks(self):
- """Tries to call all the methods that are listed in the attribute
- ``build_time_test_callbacks`` if ``self.run_tests is True``.
- """
- self._run_test_callbacks(self.build_time_test_callbacks, 'build')
-
- @on_package_attributes(run_tests=True)
- def _run_default_install_time_test_callbacks(self):
- """Tries to call all the methods that are listed in the attribute
- ``install_time_test_callbacks`` if ``self.run_tests is True``.
- """
- self._run_test_callbacks(self.install_time_test_callbacks, 'install')
-
-
-def has_test_method(pkg):
- """Determine if the package defines its own stand-alone test method.
-
- Args:
-        pkg (type): the package class being checked
-
- Returns:
- (bool): ``True`` if the package overrides the default method; else
- ``False``
- """
- if not inspect.isclass(pkg):
- tty.die('{0}: is not a class, it is {1}'.format(pkg, type(pkg)))
-
- return (
- (issubclass(pkg, PackageBase) and pkg.test != PackageBase.test) or
- (isinstance(pkg, PackageBase) and pkg.test.__func__ != PackageBase.test)
- )
-
-
-def print_test_message(logger, msg, verbose):
- if verbose:
- with logger.force_echo():
- tty.msg(msg)
- else:
- tty.msg(msg)
-
-
-def test_process(pkg, kwargs):
- verbose = kwargs.get('verbose', False)
- externals = kwargs.get('externals', False)
-
- with pkg._setup_test(verbose, externals) as logger:
- if pkg.spec.external and not externals:
- print_test_message(
- logger, 'Skipped tests for external package', verbose)
- return
-
-        # Run test methods from the package and all virtuals it provides.
-        # Virtuals have to be deduped by name.
- v_names = list(set([vspec.name
- for vspec in pkg.virtuals_provided]))
-
- # hack for compilers that are not dependencies (yet)
- # TODO: this all eventually goes away
- c_names = ('gcc', 'intel', 'intel-parallel-studio', 'pgi')
- if pkg.name in c_names:
- v_names.extend(['c', 'cxx', 'fortran'])
- if pkg.spec.satisfies('llvm+clang'):
- v_names.extend(['c', 'cxx'])
-
- test_specs = [pkg.spec] + [spack.spec.Spec(v_name)
- for v_name in sorted(v_names)]
-
- ran_actual_test_function = False
- try:
- with fsys.working_dir(pkg.test_suite.test_dir_for_spec(pkg.spec)):
- for spec in test_specs:
- pkg.test_suite.current_test_spec = spec
- # Fail gracefully if a virtual has no package/tests
- try:
- spec_pkg = spec.package
- except spack.repo.UnknownPackageError:
- continue
-
- # copy installed test sources cache into test cache dir
- if spec.concrete:
- cache_source = spec_pkg.install_test_root
- cache_dir = pkg.test_suite.current_test_cache_dir
- if (os.path.isdir(cache_source) and
- not os.path.exists(cache_dir)):
- fsys.install_tree(cache_source, cache_dir)
-
- # copy test data into test data dir
- data_source = Prefix(spec_pkg.package_dir).test
- data_dir = pkg.test_suite.current_test_data_dir
- if (os.path.isdir(data_source) and
- not os.path.exists(data_dir)):
- # We assume data dir is used read-only
- # maybe enforce this later
- shutil.copytree(data_source, data_dir)
-
- # grab the function for each method so we can call
- # it with the package
- test_fn = spec_pkg.__class__.test
- if not isinstance(test_fn, types.FunctionType):
- test_fn = test_fn.__func__
-
- # Skip any test methods consisting solely of 'pass'
- # since they do not contribute to package testing.
- source = (inspect.getsource(test_fn)).splitlines()[1:]
- lines = (ln.strip() for ln in source)
- statements = [ln for ln in lines if not ln.startswith('#')]
- if len(statements) > 0 and statements[0] == 'pass':
- continue
-
- # Run the tests
- ran_actual_test_function = True
- context = logger.force_echo if verbose else nullcontext
- with context():
- test_fn(pkg)
-
- # If fail-fast was on, we error out above
- # If we collect errors, raise them in batch here
- if pkg.test_failures:
- raise TestFailure(pkg.test_failures)
-
- finally:
-        # flag the package as having been tested (i.e., ran one or more
-        # non-pass-only methods)
- if ran_actual_test_function:
- fsys.touch(pkg.tested_file)
- else:
- print_test_message(logger, 'No tests to run', verbose)
-
-
-inject_flags = PackageBase.inject_flags
-env_flags = PackageBase.env_flags
-build_system_flags = PackageBase.build_system_flags
-
-
-class BundlePackage(PackageBase):
- """General purpose bundle, or no-code, package class."""
- #: There are no phases by default but the property is required to support
- #: post-install hooks (e.g., for module generation).
- phases = [] # type: List[str]
-    #: This attribute is used in UI queries that need to know which
-    #: build-system class we are using
- build_system_class = 'BundlePackage'
-
- #: Bundle packages do not have associated source or binary code.
- has_code = False
-
-
-class Package(PackageBase):
- """General purpose class with a single ``install``
- phase that needs to be coded by packagers.
- """
- #: The one and only phase
- phases = ['install']
-    #: This attribute is used in UI queries that need to know which
-    #: build-system class we are using
- build_system_class = 'Package'
- # This will be used as a registration decorator in user
- # packages, if need be
- run_after('install')(PackageBase.sanity_check_prefix)
- # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
- run_after('install')(PackageBase.apply_macos_rpath_fixups)
-
-
-def install_dependency_symlinks(pkg, spec, prefix):
- """
- Execute a dummy install and flatten dependencies.
-
- This routine can be used in a ``package.py`` definition by setting
- ``install = install_dependency_symlinks``.
-
-    This feature comes in handy for creating a common location for
-    the installation of third-party libraries.
- """
- flatten_dependencies(spec, prefix)
-
-
-def use_cray_compiler_names():
- """Compiler names for builds that rely on cray compiler names."""
- os.environ['CC'] = 'cc'
- os.environ['CXX'] = 'CC'
- os.environ['FC'] = 'ftn'
- os.environ['F77'] = 'ftn'
-
-
-def flatten_dependencies(spec, flat_dir):
- """Make each dependency of spec present in dir via symlink."""
- for dep in spec.traverse(root=False):
- name = dep.name
-
- dep_path = spack.store.layout.path_for_spec(dep)
- dep_files = LinkTree(dep_path)
-
- os.mkdir(flat_dir + '/' + name)
-
- conflict = dep_files.find_conflict(flat_dir + '/' + name)
- if conflict:
- raise DependencyConflictError(conflict)
-
- dep_files.merge(flat_dir + '/' + name)
-
-
-def possible_dependencies(*pkg_or_spec, **kwargs):
- """Get the possible dependencies of a number of packages.
-
- See ``PackageBase.possible_dependencies`` for details.
- """
- packages = []
- for pos in pkg_or_spec:
- if isinstance(pos, PackageMeta):
- packages.append(pos)
- continue
-
- if not isinstance(pos, spack.spec.Spec):
- pos = spack.spec.Spec(pos)
-
- if spack.repo.path.is_virtual(pos.name):
- packages.extend(
- p.package_class
- for p in spack.repo.path.providers_for(pos.name)
- )
- continue
- else:
- packages.append(pos.package_class)
-
- visited = {}
- for pkg in packages:
- pkg.possible_dependencies(visited=visited, **kwargs)
-
- return visited
-
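-# Example (hypothetical usage sketch; assumes these package names exist in
-# the repo)::
-#
-#     visited = possible_dependencies('hdf5', 'mpich', transitive=True)
-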
-
-class FetchError(spack.error.SpackError):
- """Raised when something goes wrong during fetch."""
-
- def __init__(self, message, long_msg=None):
- super(FetchError, self).__init__(message, long_msg)
-
-
-class PackageStillNeededError(InstallError):
- """Raised when package is still needed by another on uninstall."""
- def __init__(self, spec, dependents):
- super(PackageStillNeededError, self).__init__("Cannot uninstall %s" %
- spec)
- self.spec = spec
- self.dependents = dependents
-
-
-class PackageError(spack.error.SpackError):
- """Raised when something is wrong with a package definition."""
- def __init__(self, message, long_msg=None):
- super(PackageError, self).__init__(message, long_msg)
-
-
-class PackageVersionError(PackageError):
- """Raised when a version URL cannot automatically be determined."""
- def __init__(self, version):
- super(PackageVersionError, self).__init__(
- "Cannot determine a URL automatically for version %s" % version,
- "Please provide a url for this version in the package.py file.")
-
-
-class NoURLError(PackageError):
- """Raised when someone tries to build a URL for a package with no URLs."""
-
- def __init__(self, cls):
- super(NoURLError, self).__init__(
- "Package %s has no version with a URL." % cls.__name__)
-
-
-class InvalidPackageOpError(PackageError):
- """Raised when someone tries perform an invalid operation on a package."""
-
-
-class ExtensionError(PackageError):
- """Superclass for all errors having to do with extension packages."""
-
-
-class ActivationError(ExtensionError):
- """Raised when there are problems activating an extension."""
- def __init__(self, msg, long_msg=None):
- super(ActivationError, self).__init__(msg, long_msg)
-
-
-class DependencyConflictError(spack.error.SpackError):
- """Raised when the dependencies cannot be flattened as asked for."""
- def __init__(self, conflict):
- super(DependencyConflictError, self).__init__(
- "%s conflicts with another file in the flattened directory." % (
- conflict))
+# import most common types used in packages
+from typing import Dict, List, Optional
+
+import llnl.util.filesystem
+from llnl.util.filesystem import *
+
+import spack.util.executable
+from spack.build_systems.aspell_dict import AspellDictPackage
+from spack.build_systems.autotools import AutotoolsPackage
+from spack.build_systems.cached_cmake import (
+ CachedCMakePackage,
+ cmake_cache_option,
+ cmake_cache_path,
+ cmake_cache_string,
+)
+from spack.build_systems.cmake import CMakePackage
+from spack.build_systems.cuda import CudaPackage
+from spack.build_systems.gnu import GNUMirrorPackage
+from spack.build_systems.intel import IntelPackage
+from spack.build_systems.lua import LuaPackage
+from spack.build_systems.makefile import MakefilePackage
+from spack.build_systems.maven import MavenPackage
+from spack.build_systems.meson import MesonPackage
+from spack.build_systems.octave import OctavePackage
+from spack.build_systems.oneapi import (
+ IntelOneApiLibraryPackage,
+ IntelOneApiPackage,
+ IntelOneApiStaticLibraryList,
+)
+from spack.build_systems.perl import PerlPackage
+from spack.build_systems.python import PythonPackage
+from spack.build_systems.qmake import QMakePackage
+from spack.build_systems.r import RPackage
+from spack.build_systems.racket import RacketPackage
+from spack.build_systems.rocm import ROCmPackage
+from spack.build_systems.ruby import RubyPackage
+from spack.build_systems.scons import SConsPackage
+from spack.build_systems.sip import SIPPackage
+from spack.build_systems.sourceforge import SourceforgePackage
+from spack.build_systems.sourceware import SourcewarePackage
+from spack.build_systems.waf import WafPackage
+from spack.build_systems.xorg import XorgPackage
+from spack.dependency import all_deptypes
+from spack.directives import *
+from spack.install_test import get_escaped_text_output
+from spack.installer import (
+ ExternalPackageError,
+ InstallError,
+ InstallLockError,
+ UpstreamPackageError,
+)
+from spack.mixins import filter_compiler_wrappers
+from spack.multimethod import when
+from spack.package_base import (
+ BundlePackage,
+ DependencyConflictError,
+ Package,
+ build_system_flags,
+ env_flags,
+ flatten_dependencies,
+ inject_flags,
+ install_dependency_symlinks,
+ on_package_attributes,
+ run_after,
+ run_before,
+)
+from spack.spec import InvalidSpecDetected, Spec
+from spack.util.executable import *
+from spack.variant import (
+ any_combination_of,
+ auto_or_any_combination_of,
+ conditional,
+ disjoint_sets,
+)
+from spack.version import Version, ver
\ No newline at end of file
diff --git a/lib/spack/spack/package_base.py b/lib/spack/spack/package_base.py
new file mode 100644
index 0000000000..1c736fb326
--- /dev/null
+++ b/lib/spack/spack/package_base.py
@@ -0,0 +1,3107 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+"""This is where most of the action happens in Spack.
+
+The spack package class structure is based strongly on Homebrew
+(http://brew.sh/), mainly because Homebrew makes it very easy to create
+packages.
+"""
+
+import base64
+import collections
+import contextlib
+import copy
+import functools
+import glob
+import hashlib
+import inspect
+import os
+import re
+import shutil
+import sys
+import textwrap
+import time
+import traceback
+import types
+import warnings
+from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Type # novm
+
+import six
+
+import llnl.util.filesystem as fsys
+import llnl.util.tty as tty
+from llnl.util.lang import memoized, nullcontext
+from llnl.util.link_tree import LinkTree
+
+import spack.compilers
+import spack.config
+import spack.dependency
+import spack.directives
+import spack.directory_layout
+import spack.environment
+import spack.error
+import spack.fetch_strategy as fs
+import spack.hooks
+import spack.mirror
+import spack.mixins
+import spack.multimethod
+import spack.paths
+import spack.repo
+import spack.store
+import spack.url
+import spack.util.environment
+import spack.util.path
+import spack.util.web
+from spack.filesystem_view import YamlFilesystemView
+from spack.install_test import TestFailure, TestSuite
+from spack.installer import InstallError, PackageInstaller
+from spack.stage import ResourceStage, Stage, StageComposite, stage_prefix
+from spack.util.executable import ProcessError, which
+from spack.util.package_hash import package_hash
+from spack.util.prefix import Prefix
+from spack.version import Version
+
+if sys.version_info[0] >= 3:
+ FLAG_HANDLER_RETURN_TYPE = Tuple[
+ Optional[Iterable[str]],
+ Optional[Iterable[str]],
+ Optional[Iterable[str]],
+ ]
+ FLAG_HANDLER_TYPE = Callable[
+ [str, Iterable[str]],
+ FLAG_HANDLER_RETURN_TYPE
+ ]
+
+"""Allowed URL schemes for spack packages."""
+_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
+
+
+# Filename for the Spack build/install log.
+_spack_build_logfile = 'spack-build-out.txt'
+
+# Filename for the Spack build/install environment file.
+_spack_build_envfile = 'spack-build-env.txt'
+
+# Filename for the Spack build/install environment modifications file.
+_spack_build_envmodsfile = 'spack-build-env-mods.txt'
+
+# Filename for the Spack install phase-time test log.
+_spack_install_test_log = 'install-time-test-log.txt'
+
+# Filename of json with total build and phase times (seconds)
+_spack_times_log = 'install_times.json'
+
+# Filename for the Spack configure args file.
+_spack_configure_argsfile = 'spack-configure-args.txt'
+
+
+def preferred_version(pkg):
+ """
+    Returns the preferred version of the package.
+
+ Arguments:
+ pkg (Package): The package whose versions are to be assessed.
+ """
+ # Here we sort first on the fact that a version is marked
+ # as preferred in the package, then on the fact that the
+ # version is not develop, then lexicographically
+ key_fn = lambda v: (pkg.versions[v].get('preferred', False),
+ not v.isdevelop(),
+ v)
+ return sorted(pkg.versions, key=key_fn).pop()
+
+
+class InstallPhase(object):
+ """Manages a single phase of the installation.
+
+ This descriptor stores at creation time the name of the method it should
+ search for execution. The method is retrieved at __get__ time, so that
+ it can be overridden by subclasses of whatever class declared the phases.
+
+ It also provides hooks to execute arbitrary callbacks before and after
+ the phase.
+ """
+
+ def __init__(self, name):
+ self.name = name
+ self.run_before = []
+ self.run_after = []
+
+ def __get__(self, instance, owner):
+        # The caller is a class that is trying to customize
+        # my behavior by adding something
+ if instance is None:
+ return self
+ # If instance is there the caller wants to execute the
+ # install phase, thus return a properly set wrapper
+ phase = getattr(instance, self.name)
+
+ @functools.wraps(phase)
+ def phase_wrapper(spec, prefix):
+ # Check instance attributes at the beginning of a phase
+ self._on_phase_start(instance)
+ # Execute phase pre-conditions,
+ # and give them the chance to fail
+ for callback in self.run_before:
+ callback(instance)
+ phase(spec, prefix)
+ # Execute phase sanity_checks,
+ # and give them the chance to fail
+ for callback in self.run_after:
+ callback(instance)
+ # Check instance attributes at the end of a phase
+ self._on_phase_exit(instance)
+ return phase_wrapper
+
+ def _on_phase_start(self, instance):
+ # If a phase has a matching stop_before_phase attribute,
+ # stop the installation process raising a StopPhase
+ if getattr(instance, 'stop_before_phase', None) == self.name:
+ from spack.build_environment import StopPhase
+ raise StopPhase('Stopping before \'{0}\' phase'.format(self.name))
+
+ def _on_phase_exit(self, instance):
+ # If a phase has a matching last_phase attribute,
+ # stop the installation process raising a StopPhase
+ if getattr(instance, 'last_phase', None) == self.name:
+ from spack.build_environment import StopPhase
+ raise StopPhase('Stopping at \'{0}\' phase'.format(self.name))
+
+ def copy(self):
+ try:
+ return copy.deepcopy(self)
+ except TypeError:
+ # This bug-fix was not back-ported in Python 2.6
+ # http://bugs.python.org/issue1515
+ other = InstallPhase(self.name)
+ other.run_before.extend(self.run_before)
+ other.run_after.extend(self.run_after)
+ return other
+
+
+#: Registry of detectable packages, keyed by repo and package name.
+#: Needs a pass over the package repositories to be filled.
+detectable_packages = collections.defaultdict(list)
+
+
+class DetectablePackageMeta(object):
+ """Check if a package is detectable and add default implementations
+ for the detection function.
+ """
+ def __init__(cls, name, bases, attr_dict):
+ if hasattr(cls, 'executables') and hasattr(cls, 'libraries'):
+ msg = "a package can have either an 'executables' or 'libraries' attribute"
+ msg += " [package '{0.name}' defines both]"
+ raise spack.error.SpackError(msg.format(cls))
+
+        # On Windows, extend the list of regular expressions to look for
+        # filenames ending with ".exe". In some cases these regular
+        # expressions include "$" to avoid pulling in filenames with
+        # unexpected suffixes, but this allows, for example, detecting
+        # "foo.exe" when the package writer specified that "foo" was a
+        # possible executable.
+
+ # If a package has the executables or libraries attribute then it's
+ # assumed to be detectable
+ if hasattr(cls, 'executables') or hasattr(cls, 'libraries'):
+ @property
+ def platform_executables(self):
+ def to_windows_exe(exe):
+ if exe.endswith('$'):
+ exe = exe.replace('$', '%s$' % spack.util.path.win_exe_ext())
+ else:
+ exe += spack.util.path.win_exe_ext()
+ return exe
+ plat_exe = []
+ if hasattr(self, 'executables'):
+ for exe in self.executables:
+ if sys.platform == 'win32':
+ exe = to_windows_exe(exe)
+ plat_exe.append(exe)
+ return plat_exe
+
+ @classmethod
+ def determine_spec_details(cls, prefix, objs_in_prefix):
+ """Allow ``spack external find ...`` to locate installations.
+
+ Args:
+ prefix (str): the directory containing the executables
+ or libraries
+ objs_in_prefix (set): the executables or libraries that
+ match the regex
+
+ Returns:
+ The list of detected specs for this package
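+
+                Example (a hypothetical sketch of a detectable package;
+                the class name and regex are made up)::
+
+                    class Foo(Package):
+                        executables = ['^foo$']
+
+                        @classmethod
+                        def determine_version(cls, exe):
+                            return Executable(exe)('--version', output=str)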
+ """
+ objs_by_version = collections.defaultdict(list)
+ # The default filter function is the identity function for the
+ # list of executables
+ filter_fn = getattr(cls, 'filter_detected_exes',
+ lambda x, exes: exes)
+ objs_in_prefix = filter_fn(prefix, objs_in_prefix)
+ for obj in objs_in_prefix:
+ try:
+ version_str = cls.determine_version(obj)
+ if version_str:
+ objs_by_version[version_str].append(obj)
+ except Exception as e:
+ msg = ('An error occurred when trying to detect '
+ 'the version of "{0}" [{1}]')
+ tty.debug(msg.format(obj, str(e)))
+
+ specs = []
+ for version_str, objs in objs_by_version.items():
+ variants = cls.determine_variants(objs, version_str)
+ # Normalize output to list
+ if not isinstance(variants, list):
+ variants = [variants]
+
+ for variant in variants:
+ if isinstance(variant, six.string_types):
+ variant = (variant, {})
+ variant_str, extra_attributes = variant
+ spec_str = '{0}@{1} {2}'.format(
+ cls.name, version_str, variant_str
+ )
+
+ # Pop a few reserved keys from extra attributes, since
+ # they have a different semantics
+ external_path = extra_attributes.pop('prefix', None)
+ external_modules = extra_attributes.pop(
+ 'modules', None
+ )
+ try:
+ spec = spack.spec.Spec(
+ spec_str,
+ external_path=external_path,
+ external_modules=external_modules
+ )
+ except Exception as e:
+ msg = 'Parsing failed [spec_str="{0}", error={1}]'
+ tty.debug(msg.format(spec_str, str(e)))
+ else:
+ specs.append(spack.spec.Spec.from_detection(
+ spec, extra_attributes=extra_attributes
+ ))
+
+ return sorted(specs)
+
+ @classmethod
+ def determine_variants(cls, objs, version_str):
+ return ''
+
+ # Register the class as a detectable package
+ detectable_packages[cls.namespace].append(cls.name)
+
+ # Attach function implementations to the detectable class
+ default = False
+ if not hasattr(cls, 'determine_spec_details'):
+ default = True
+ cls.determine_spec_details = determine_spec_details
+
+ if default and not hasattr(cls, 'determine_version'):
+ msg = ('the package "{0}" in the "{1}" repo needs to define'
+ ' the "determine_version" method to be detectable')
+            raise NotImplementedError(msg.format(cls.name, cls.namespace))
+
+ if default and not hasattr(cls, 'determine_variants'):
+ cls.determine_variants = determine_variants
+
+ # This function should not be overridden by subclasses,
+ # as it is not designed for bespoke pkg detection but rather
+ # on a per-platform basis
+ if 'platform_executables' in cls.__dict__.keys():
+ raise PackageError("Packages should not override platform_executables")
+ cls.platform_executables = platform_executables
+
+ super(DetectablePackageMeta, cls).__init__(name, bases, attr_dict)
+
+
+class PackageMeta(
+ DetectablePackageMeta,
+ spack.directives.DirectiveMeta,
+ spack.mixins.PackageMixinsMeta,
+ spack.multimethod.MultiMethodMeta
+):
+ """
+ Package metaclass for supporting directives (e.g., depends_on) and phases
+ """
+ phase_fmt = '_InstallPhase_{0}'
+
+ # These are accessed only through getattr, by name
+ _InstallPhase_run_before = {} # type: Dict[str, List[Callable]]
+ _InstallPhase_run_after = {} # type: Dict[str, List[Callable]]
+
+ def __new__(cls, name, bases, attr_dict):
+ """
+ Instance creation is preceded by phase attribute transformations.
+
+ Conveniently transforms attributes to permit extensible phases by
+ iterating over the attribute 'phases' and creating / updating private
+ InstallPhase attributes in the class that will be initialized in
+ __init__.
+ """
+ if 'phases' in attr_dict:
+ # Turn the strings in 'phases' into InstallPhase instances
+ # and add them as private attributes
+ _InstallPhase_phases = [PackageMeta.phase_fmt.format(x) for x in attr_dict['phases']] # NOQA: ignore=E501
+ for phase_name, callback_name in zip(_InstallPhase_phases, attr_dict['phases']): # NOQA: ignore=E501
+ attr_dict[phase_name] = InstallPhase(callback_name)
+ attr_dict['_InstallPhase_phases'] = _InstallPhase_phases
+
+ def _flush_callbacks(check_name):
+ # Name of the attribute I am going to check it exists
+ check_attr = PackageMeta.phase_fmt.format(check_name)
+ checks = getattr(cls, check_attr)
+ if checks:
+ for phase_name, funcs in checks.items():
+ phase_attr = PackageMeta.phase_fmt.format(phase_name)
+ try:
+ # Search for the phase in the attribute dictionary
+ phase = attr_dict[phase_attr]
+ except KeyError:
+ # If it is not there it's in the bases
+ # and we added a check. We need to copy
+ # and extend
+ for base in bases:
+ phase = getattr(base, phase_attr, None)
+ if phase is not None:
+ break
+
+ phase = attr_dict[phase_attr] = phase.copy()
+ getattr(phase, check_name).extend(funcs)
+ # Clear the attribute for the next class
+ setattr(cls, check_attr, {})
+
+ _flush_callbacks('run_before')
+ _flush_callbacks('run_after')
+
+ # Reset names for packages that inherit from another
+ # package with a different name
+ attr_dict['_name'] = None
+
+ return super(PackageMeta, cls).__new__(cls, name, bases, attr_dict)
+
+ @staticmethod
+ def register_callback(check_type, *phases):
+ def _decorator(func):
+ attr_name = PackageMeta.phase_fmt.format(check_type)
+ check_list = getattr(PackageMeta, attr_name)
+ for item in phases:
+ checks = check_list.setdefault(item, [])
+ checks.append(func)
+ setattr(PackageMeta, attr_name, check_list)
+ return func
+ return _decorator
+
+ @property
+ def package_dir(self):
+ """Directory where the package.py file lives."""
+ return os.path.abspath(os.path.dirname(self.module.__file__))
+
+ @property
+ def module(self):
+ """Module object (not just the name) that this package is defined in.
+
+ We use this to add variables to package modules. This makes
+ install() methods easier to write (e.g., can call configure())
+ """
+ return __import__(self.__module__, fromlist=[self.__name__])
+
+ @property
+ def namespace(self):
+ """Spack namespace for the package, which identifies its repo."""
+ return spack.repo.namespace_from_fullname(self.__module__)
+
+ @property
+ def fullname(self):
+ """Name of this package, including the namespace"""
+ return '%s.%s' % (self.namespace, self.name)
+
+ @property
+ def fullnames(self):
+ """
+ Fullnames for this package and any packages from which it inherits.
+ """
+ fullnames = []
+ for cls in inspect.getmro(self):
+ namespace = getattr(cls, 'namespace', None)
+ if namespace:
+ fullnames.append('%s.%s' % (namespace, self.name))
+ if namespace == 'builtin':
+ # builtin packages cannot inherit from other repos
+ break
+ return fullnames
+
+ @property
+ def name(self):
+ """The name of this package.
+
+ The name of a package is the name of its Python module, without
+ the containing module names.
+ """
+ if self._name is None:
+ self._name = self.module.__name__
+ if '.' in self._name:
+ self._name = self._name[self._name.rindex('.') + 1:]
+ return self._name
+
+ @property
+ def global_license_dir(self):
+ """Returns the directory where license files for all packages are stored."""
+ return spack.util.path.canonicalize_path(spack.config.get('config:license_dir'))
+
+
+def run_before(*phases):
+ """Registers a method of a package to be run before a given phase"""
+ return PackageMeta.register_callback('run_before', *phases)
+
+
+def run_after(*phases):
+ """Registers a method of a package to be run after a given phase"""
+ return PackageMeta.register_callback('run_after', *phases)
+
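+# Example (hypothetical usage in a package class; the method name is
+# illustrative)::
+#
+#     @run_after('install')
+#     def post_install_check(self):
+#         ...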
+
+def on_package_attributes(**attr_dict):
+ """Decorator: executes instance function only if object has attr valuses.
+
+ Executes the decorated method only if at the moment of calling the
+ instance has attributes that are equal to certain values.
+
+ Args:
+ attr_dict (dict): dictionary mapping attribute names to their
+ required values
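+
+    Example (a hypothetical usage sketch; the method name is made up)::
+
+        @on_package_attributes(run_tests=True)
+        def check_install(self):
+            ...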
+ """
+ def _execute_under_condition(func):
+
+ @functools.wraps(func)
+ def _wrapper(instance, *args, **kwargs):
+ # If all the attributes have the value we require, then execute
+ has_all_attributes = all(
+ [hasattr(instance, key) for key in attr_dict]
+ )
+ if has_all_attributes:
+ has_the_right_values = all(
+ [getattr(instance, key) == value for key, value in attr_dict.items()] # NOQA: ignore=E501
+ )
+ if has_the_right_values:
+ func(instance, *args, **kwargs)
+ return _wrapper
+
+ return _execute_under_condition
+
+
+class PackageViewMixin(object):
+ """This collects all functionality related to adding installed Spack
+ package to views. Packages can customize how they are added to views by
+ overriding these functions.
+ """
+ def view_source(self):
+ """The source root directory that will be added to the view: files are
+ added such that their path relative to the view destination matches
+ their path relative to the view source.
+ """
+ return self.spec.prefix
+
+ def view_destination(self, view):
+ """The target root directory: each file is added relative to this
+ directory.
+ """
+ return view.get_projection_for_spec(self.spec)
+
+ def view_file_conflicts(self, view, merge_map):
+ """Report any files which prevent adding this package to the view. The
+ default implementation looks for any files which already exist.
+ Alternative implementations may allow some of the files to exist in
+ the view (in this case they would be omitted from the results).
+ """
+ return set(dst for dst in merge_map.values() if os.path.lexists(dst))
+
+ def add_files_to_view(self, view, merge_map, skip_if_exists=True):
+ """Given a map of package files to destination paths in the view, add
+ the files to the view. By default this adds all files. Alternative
+ implementations may skip some files, for example if other packages
+ linked into the view already include the file.
+
+ Args:
+ view (spack.filesystem_view.FilesystemView): the view that's updated
+            merge_map (dict): maps absolute source paths to absolute dest
+                paths for all files from this package.
+ skip_if_exists (bool): when True, don't link files in view when they
+ already exist. When False, always link files, without checking
+ if they already exist.
+ """
+ if skip_if_exists:
+ for src, dst in merge_map.items():
+ if not os.path.lexists(dst):
+ view.link(src, dst, spec=self.spec)
+ else:
+ for src, dst in merge_map.items():
+ view.link(src, dst, spec=self.spec)
+
+ def remove_files_from_view(self, view, merge_map):
+ """Given a map of package files to files currently linked in the view,
+ remove the files from the view. The default implementation removes all
+ files. Alternative implementations may not remove all files. For
+ example if two packages include the same file, it should only be
+ removed when both packages are removed.
+ """
+ view.remove_files(merge_map.values())
+
+
+def test_log_pathname(test_stage, spec):
+ """Build the pathname of the test log file
+
+ Args:
+ test_stage (str): path to the test stage directory
+ spec (spack.spec.Spec): instance of the spec under test
+
+ Returns:
+ (str): the pathname of the test log file
+ """
+ return os.path.join(test_stage,
+ 'test-{0}-out.txt'.format(TestSuite.test_pkg_id(spec)))
+
+
+class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
+ """This is the superclass for all spack packages.
+
+ ***The Package class***
+
+ At its core, a package consists of a set of software to be installed.
+ A package may focus on a piece of software and its associated software
+ dependencies or it may simply be a set, or bundle, of software. The
+ former requires defining how to fetch, verify (via, e.g., sha256), build,
+ and install that software and the packages it depends on, so that
+ dependencies can be installed along with the package itself. The latter,
+ sometimes referred to as a ``no-source`` package, requires only defining
+ the packages to be built.
+
+ Packages are written in pure Python.
+
+ There are two main parts of a Spack package:
+
+ 1. **The package class**. Classes contain ``directives``, which are
+ special functions, that add metadata (versions, patches,
+ dependencies, and other information) to packages (see
+ ``directives.py``). Directives provide the constraints that are
+ used as input to the concretizer.
+
+ 2. **Package instances**. Once instantiated, a package is
+ essentially a software installer. Spack calls methods like
+ ``do_install()`` on the ``Package`` object, and it uses those to
+ drive user-implemented methods like ``patch()``, ``install()``, and
+ other build steps. To install software, an instantiated package
+ needs a *concrete* spec, which guides the behavior of the various
+ install methods.
+
+ Packages are imported from repos (see ``repo.py``).
+
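+ As an illustration, a minimal package sketch might look like the
+ following (the names, URL, and checksum are placeholders, not a real
+ package):
+
+ .. code-block:: python
+
+     class Libfoo(Package):
+         homepage = "https://example.com/libfoo"
+         url = "https://example.com/libfoo-1.0.tar.gz"
+
+         version('1.0', sha256='<placeholder>')
+         depends_on('zlib')
+
+         def install(self, spec, prefix):
+             make('install', 'PREFIX={0}'.format(prefix))
+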
+ **Package DSL**
+
+ Look in ``lib/spack/docs`` or check https://spack.readthedocs.io for
+ the full documentation of the package domain-specific language. That
+ used to be partially documented here, but as it grew, the docs here
+ became increasingly out of date.
+
+ **Package Lifecycle**
+
+ A package's lifecycle over a run of Spack looks something like this:
+
+ .. code-block:: python
+
+ p = Package() # Done for you by spack
+
+ p.do_fetch() # downloads tarball from a URL (or VCS)
+ p.do_stage() # expands tarball in a temp directory
+ p.do_patch() # applies patches to expanded source
+ p.do_install() # calls package's install() function
+ p.do_uninstall() # removes install directory
+
+ although packages that do not have code have nothing to fetch, so they
+ omit ``p.do_fetch()``.
+
+ There are also some other commands that clean the build area:
+
+ .. code-block:: python
+
+ p.do_clean() # removes the stage directory entirely
+ p.do_restage() # removes the build directory and
+ # re-expands the archive.
+
+ The convention used here is that a ``do_*`` function is intended to be
+ called internally by Spack commands (in ``spack.cmd``). These aren't for
+ package writers to override, and doing so may break the functionality
+ of the Package class.
+
+ Package creators have a lot of freedom, and they could technically
+ override anything in this class. That is not usually required.
+
+ For most use cases, package creators typically just add attributes
+ like ``homepage`` and, for a code-based package, ``url``, or functions
+ such as ``install()``.
+ There are many custom ``Package`` subclasses in the
+ ``spack.build_systems`` package that make things even easier for
+ specific build systems.
+
+ """
+ #
+ # These are default values for instance variables.
+ #
+
+ #: A list or set of build-time test functions to be called when tests
+ #: are executed, or ``None`` if there are no such test functions.
+ build_time_test_callbacks = None # type: Optional[List[str]]
+
+ #: By default, packages are not virtual
+ #: Virtual packages override this attribute
+ virtual = False
+
+ #: Most Spack packages are used to install source or binary code, while
+ #: those that do not have code can be used to install a set of other
+ #: Spack packages.
+ has_code = True
+
+ #: A list or set of install-time test functions to be called when tests
+ #: are executed, or ``None`` if there are no such test functions.
+ install_time_test_callbacks = None # type: Optional[List[str]]
+
+ #: By default we build in parallel. Subclasses can override this.
+ parallel = True
+
+ #: By default do not run tests within package's install()
+ run_tests = False
+
+ # FIXME: this is a bad object-oriented design, should be moved to Clang.
+ #: By default do not setup mockup XCode on macOS with Clang
+ use_xcode = False
+
+ #: Most packages are NOT extendable. Set to True if you want extensions.
+ extendable = False
+
+ #: When True, add RPATHs for the entire DAG. When False, add RPATHs only
+ #: for immediate dependencies.
+ transitive_rpaths = True
+
+ #: List of prefix-relative file paths (or a single path). If these do
+ #: not exist after install, or if they exist but are not files,
+ #: sanity checks fail.
+ sanity_check_is_file = [] # type: List[str]
+
+ #: List of prefix-relative directory paths (or a single path). If
+ #: these do not exist after install, or if they exist but are not
+ #: directories, sanity checks will fail.
+ sanity_check_is_dir = [] # type: List[str]
+
+ #: List of glob expressions. Each expression must either be
+ #: absolute or relative to the package source path.
+ #: Matching artifacts found at the end of the build process will be
+ #: copied into the same directory tree as _spack_build_logfile and
+ #: _spack_build_envfile.
+ archive_files = [] # type: List[str]
+
+ #: Boolean. Set to ``True`` for packages that require a manual download.
+ #: This is currently used by package sanity tests and to generate a
+ #: more meaningful fetch failure error message.
+ manual_download = False
+
+ #: Set of additional options used when fetching package versions.
+ fetch_options = {} # type: Dict[str, Any]
+
+ #
+ # Set default licensing information
+ #
+ #: Boolean. If set to ``True``, this software requires a license.
+ #: If set to ``False``, all of the ``license_*`` attributes will
+ #: be ignored. Defaults to ``False``.
+ license_required = False
+
+ #: String. Contains the symbol used by the license manager to denote
+ #: a comment. Defaults to ``#``.
+ license_comment = '#'
+
+ #: List of strings. These are files that the software searches for when
+ #: looking for a license. All file paths must be relative to the
+ #: installation directory. More complex packages like Intel may require
+ #: multiple licenses for individual components. Defaults to the empty list.
+ license_files = [] # type: List[str]
+
+ #: List of strings. Environment variables that can be set to tell the
+ #: software where to look for a license if it is not in the usual location.
+ #: Defaults to the empty list.
+ license_vars = [] # type: List[str]
+
+ #: String. A URL pointing to license setup instructions for the software.
+ #: Defaults to the empty string.
+ license_url = ''
+
+ #: Verbosity level, preserved across installs.
+ _verbose = None
+
+ #: index of patches by sha256 sum, built lazily
+ _patches_by_hash = None
+
+ #: Package homepage where users can find more information about the package
+ homepage = None # type: str
+
+ #: Default list URL (place to find available versions)
+ list_url = None # type: str
+
+ #: Link depth to which list_url should be searched for new versions
+ list_depth = 0
+
+ #: List of strings containing the GitHub usernames of package maintainers.
+ #: Do not include '@' here so as not to unnecessarily ping the users.
+ maintainers = [] # type: List[str]
+
+ #: List of attributes to be excluded from a package's hash.
+ metadata_attrs = [
+ "homepage",
+ "url",
+ "urls",
+ "list_url",
+ "extendable",
+ "parallel",
+ "make_jobs",
+ "maintainers",
+ "tags",
+ ]
+
+ #: Boolean. If set to ``True``, the smoke/install test requires a compiler.
+ #: This is currently used by smoke tests to ensure a compiler is available
+ #: to build custom test code.
+ test_requires_compiler = False
+
+ #: List of test failures encountered during a smoke/install test run.
+ test_failures = None
+
+ #: TestSuite instance used to manage smoke/install tests for one or more
+ #: specs.
+ test_suite = None
+
+ def __init__(self, spec):
+ # this determines how the package should be built.
+ self.spec = spec
+
+ # Allow custom staging paths for packages
+ self.path = None
+
+ # Keep track of whether or not this package was installed from
+ # a binary cache.
+ self.installed_from_binary_cache = False
+
+ # Ensure that only one of these two attributes is present
+ if getattr(self, 'url', None) and getattr(self, 'urls', None):
+ msg = "a package can have either a 'url' or a 'urls' attribute"
+ msg += " [package '{0.name}' defines both]"
+ raise ValueError(msg.format(self))
+
+ # init internal variables
+ self._stage = None
+ self._fetcher = None
+
+ # Set up timing variables
+ self._fetch_time = 0.0
+
+ if self.is_extension:
+ spack.repo.get(self.extendee_spec)._check_extendable()
+
+ super(PackageBase, self).__init__()
+
+ @classmethod
+ def possible_dependencies(
+ cls, transitive=True, expand_virtuals=True, deptype='all',
+ visited=None, missing=None, virtuals=None):
+ """Return dict of possible dependencies of this package.
+
+ Args:
+ transitive (bool or None): return all transitive dependencies if
+ True, only direct dependencies if False (default True).
+ expand_virtuals (bool or None): expand virtual dependencies into
+ all possible implementations (default True)
+ deptype (str or tuple or None): dependency types to consider
+ visited (dict or None): dict of names of dependencies visited so
+ far, mapped to their immediate dependencies' names.
+ missing (dict or None): dict to populate with packages and their
+ *missing* dependencies.
+ virtuals (set): if provided, populate with virtuals seen so far.
+
+ Returns:
+ (dict): dictionary mapping dependency names to *their*
+ immediate dependencies
+
+ Each item in the returned dictionary maps a (potentially
+ transitive) dependency of this package to its possible
+ *immediate* dependencies. If ``expand_virtuals`` is ``False``,
+ virtual package names will be inserted as keys mapped to empty
+ sets of dependencies. Virtuals, if not expanded, are treated as
+ though they have no immediate dependencies.
+
+ Missing dependencies are ignored by default, but if a
+ missing dict is provided, it will be populated with package names
+ mapped to any dependencies they have that are not in any
+ repository. This is only populated if transitive is True.
+
+ Note: the returned dict *includes* the package itself.
+
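+ For example (a sketch; the names and exact result depend on the
+ package repository):
+
+ .. code-block:: python
+
+     Mpileaks.possible_dependencies(expand_virtuals=False)
+     # -> {'mpileaks': {'callpath', 'mpi'},
+     #     'callpath': {'dyninst', 'mpi'},
+     #     'dyninst': {'libdwarf', 'libelf'},
+     #     'libdwarf': {'libelf'},
+     #     'libelf': set(), 'mpi': set()}
+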
+ """
+ deptype = spack.dependency.canonical_deptype(deptype)
+
+ visited = {} if visited is None else visited
+ missing = {} if missing is None else missing
+
+ visited.setdefault(cls.name, set())
+
+ for name, conditions in cls.dependencies.items():
+ # check whether this dependency could be of the type asked for
+ types = [dep.type for cond, dep in conditions.items()]
+ types = set.union(*types)
+ if not any(d in types for d in deptype):
+ continue
+
+ # expand virtuals if enabled, otherwise just stop at virtuals
+ if spack.repo.path.is_virtual(name):
+ if virtuals is not None:
+ virtuals.add(name)
+ if expand_virtuals:
+ providers = spack.repo.path.providers_for(name)
+ dep_names = [spec.name for spec in providers]
+ else:
+ visited.setdefault(cls.name, set()).add(name)
+ visited.setdefault(name, set())
+ continue
+ else:
+ dep_names = [name]
+
+ # add the dependency names to the visited dict
+ visited.setdefault(cls.name, set()).update(set(dep_names))
+
+ # recursively traverse dependencies
+ for dep_name in dep_names:
+ if dep_name in visited:
+ continue
+
+ visited.setdefault(dep_name, set())
+
+ # skip the rest if not transitive
+ if not transitive:
+ continue
+
+ try:
+ dep_cls = spack.repo.path.get_pkg_class(dep_name)
+ except spack.repo.UnknownPackageError:
+ # log unknown packages
+ missing.setdefault(cls.name, set()).add(dep_name)
+ continue
+
+ dep_cls.possible_dependencies(
+ transitive, expand_virtuals, deptype, visited, missing,
+ virtuals)
+
+ return visited
+
+ def enum_constraints(self, visited=None):
+ """Return transitive dependency constraints on this package."""
+ if visited is None:
+ visited = set()
+ visited.add(self.name)
+
+ names = []
+ clauses = []
+
+ for name in self.dependencies:
+ if name not in visited and not spack.spec.Spec(name).virtual:
+ pkg = spack.repo.get(name)
+ dvis, dnames, dclauses = pkg.enum_constraints(visited)
+ visited |= dvis
+ names.extend(dnames)
+ clauses.extend(dclauses)
+
+ return visited, names, clauses
+
+ # package_dir and module are *class* properties (see PackageMeta),
+ # but to make them work on instances we need these defs as well.
+ @property
+ def package_dir(self):
+ """Directory where the package.py file lives."""
+ return type(self).package_dir
+
+ @property
+ def module(self):
+ """Module object that this package is defined in."""
+ return type(self).module
+
+ @property
+ def namespace(self):
+ """Spack namespace for the package, which identifies its repo."""
+ return type(self).namespace
+
+ @property
+ def fullname(self):
+ """Name of this package, including namespace: namespace.name."""
+ return type(self).fullname
+
+ @property
+ def fullnames(self):
+ return type(self).fullnames
+
+ @property
+ def name(self):
+ """Name of this package (the module without parent modules)."""
+ return type(self).name
+
+ @property
+ def global_license_dir(self):
+ """Returns the directory where global license files are stored."""
+ return type(self).global_license_dir
+
+ @property
+ def global_license_file(self):
+ """Returns the path where a global license file for this
+ particular package should be stored."""
+ if not self.license_files:
+ return
+ return os.path.join(self.global_license_dir, self.name,
+ os.path.basename(self.license_files[0]))
+
+ @property
+ def version(self):
+ if not self.spec.versions.concrete:
+ raise ValueError("Version requested for a package that"
+ " does not have a concrete version.")
+ return self.spec.versions[0]
+
+ @memoized
+ def version_urls(self):
+ """OrderedDict of explicitly defined URLs for versions of this package.
+
+ Return:
+ An OrderedDict (version -> URL) different versions of this
+ package, sorted by version.
+
+ A version's URL only appears in the result if it has an an
+ explicitly defined ``url`` argument. So, this list may be empty
+ if a package only defines ``url`` at the top level.
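+
+ For example, a package defining ``version('1.0', url='...')`` would
+ contribute an entry like the following (the URL is illustrative)::
+
+     OrderedDict([(Version('1.0'), 'https://example.com/foo-1.0.tar.gz')])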
+ """
+ version_urls = collections.OrderedDict()
+ for v, args in sorted(self.versions.items()):
+ if 'url' in args:
+ version_urls[v] = args['url']
+ return version_urls
+
+ def nearest_url(self, version):
+ """Finds the URL with the "closest" version to ``version``.
+
+ This uses the following precedence order:
+
+ 1. Find the next lowest or equal version with a URL.
+ 2. If no lower URL, return the next *higher* URL.
+ 3. If no higher URL, return None.
+
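+ For example, if only versions 1.0 and 2.0 define URLs, then
+ ``nearest_url(Version('1.5'))`` returns the 1.0 URL, and
+ ``nearest_url(Version('0.5'))`` returns the 1.0 URL as well (the
+ next higher).
+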
+ """
+ version_urls = self.version_urls()
+
+ if version in version_urls:
+ return version_urls[version]
+
+ last_url = None
+ for v, u in self.version_urls().items():
+ if v > version:
+ if last_url:
+ return last_url
+ last_url = u
+
+ return last_url
+
+ def url_for_version(self, version):
+ """Returns a URL from which the specified version of this package
+ may be downloaded.
+
+ version: class Version
+ The version for which a URL is sought.
+
+ See Class Version (version.py)
+ """
+ return self._implement_all_urls_for_version(version)[0]
+
+ def all_urls_for_version(self, version, custom_url_for_version=None):
+ """Returns all URLs derived from version_urls(), url, urls, and
+ list_url (if it contains a version) in a package in that order.
+
+ version: class Version
+ The version for which a URL is sought.
+
+ See Class Version (version.py)
+ """
+ uf = None
+ if type(self).url_for_version != Package.url_for_version:
+ uf = self.url_for_version
+ return self._implement_all_urls_for_version(version, uf)
+
+ def _implement_all_urls_for_version(self, version, custom_url_for_version=None):
+ if not isinstance(version, Version):
+ version = Version(version)
+
+ urls = []
+
+ # If we have a specific URL for this version, don't extrapolate.
+ version_urls = self.version_urls()
+ if version in version_urls:
+ urls.append(version_urls[version])
+
+ # if there is a custom url_for_version, use it
+ if custom_url_for_version is not None:
+ u = custom_url_for_version(version)
+ if u not in urls and u is not None:
+ urls.append(u)
+
+ def sub_and_add(u):
+ if u is None:
+ return
+ # skip the url if there is no version to replace
+ try:
+ spack.url.parse_version(u)
+ except spack.url.UndetectableVersionError:
+ return
+ nu = spack.url.substitute_version(u, self.url_version(version))
+
+ urls.append(nu)
+ # If no specific URL, use the default, class-level URL
+ sub_and_add(getattr(self, 'url', None))
+ for u in getattr(self, 'urls', []):
+ sub_and_add(u)
+
+ sub_and_add(getattr(self, 'list_url', None))
+
+ # if no version-bearing URLs can be found, try them raw
+ if not urls:
+ default_url = getattr(self, "url", getattr(self, "urls", [None])[0])
+
+ # if no exact match AND no class-level default, use the nearest URL
+ if not default_url:
+ default_url = self.nearest_url(version)
+
+ # if there are NO URLs to go by, then we can't do anything
+ if not default_url:
+ raise NoURLError(self.__class__)
+ urls.append(
+ spack.url.substitute_version(
+ default_url, self.url_version(version)
+ )
+ )
+
+ return urls
+
+ def find_valid_url_for_version(self, version):
+ """Returns a URL from which the specified version of this package
+ may be downloaded after testing whether the url is valid. Will try
+ url, urls, and list_url before failing.
+
+ Args:
+ version (spack.version.Version): the version for which a URL is sought
+ """
+ urls = self.all_urls_for_version(version)
+
+ for u in urls:
+ if spack.util.web.url_exists(u):
+ return u
+
+ return None
+
+ def _make_resource_stage(self, root_stage, fetcher, resource):
+ resource_stage_folder = self._resource_stage(resource)
+ mirror_paths = spack.mirror.mirror_archive_paths(
+ fetcher,
+ os.path.join(self.name, "%s-%s" % (resource.name, self.version)))
+ stage = ResourceStage(resource.fetcher,
+ root=root_stage,
+ resource=resource,
+ name=resource_stage_folder,
+ mirror_paths=mirror_paths,
+ path=self.path)
+ return stage
+
+ def _download_search(self):
+ dynamic_fetcher = fs.from_list_url(self)
+ return [dynamic_fetcher] if dynamic_fetcher else []
+
+ def _make_root_stage(self, fetcher):
+ # Construct a mirror path (TODO: get this out of package.py)
+ mirror_paths = spack.mirror.mirror_archive_paths(
+ fetcher,
+ os.path.join(self.name, "%s-%s" % (self.name, self.version)),
+ self.spec)
+ # Construct a path where the stage should build.
+ s = self.spec
+ stage_name = "{0}{1}-{2}-{3}".format(stage_prefix, s.name, s.version,
+ s.dag_hash())
+
+ stage = Stage(fetcher, mirror_paths=mirror_paths, name=stage_name,
+ path=self.path, search_fn=self._download_search)
+ return stage
+
+ def _make_stage(self):
+ # If it's a dev package (not transitively), use a DIY stage object
+ dev_path_var = self.spec.variants.get('dev_path', None)
+ if dev_path_var:
+ return spack.stage.DIYStage(dev_path_var.value)
+
+ # Construct a composite stage on top of the composite FetchStrategy
+ composite_fetcher = self.fetcher
+ composite_stage = StageComposite()
+ resources = self._get_needed_resources()
+ for ii, fetcher in enumerate(composite_fetcher):
+ if ii == 0:
+ # Construct root stage first
+ stage = self._make_root_stage(fetcher)
+ else:
+ # Construct resource stage
+ resource = resources[ii - 1] # ii == 0 is root!
+ stage = self._make_resource_stage(composite_stage[0], fetcher,
+ resource)
+ # Append the item to the composite
+ composite_stage.append(stage)
+
+ return composite_stage
+
+ @property
+ def stage(self):
+ """Get the build staging area for this package.
+
+ This automatically instantiates a ``Stage`` object if the package
+ doesn't have one yet, but it does not create the Stage directory
+ on the filesystem.
+ """
+ if not self.spec.versions.concrete:
+ raise ValueError(
+ "Cannot retrieve stage for package without concrete version.")
+ if self._stage is None:
+ self._stage = self._make_stage()
+ return self._stage
+
+ @stage.setter
+ def stage(self, stage):
+ """Allow a stage object to be set to override the default."""
+ self._stage = stage
+
+ @property
+ def env_path(self):
+ """Return the build environment file path associated with staging."""
+ # Backward compatibility: Return the name of an existing log path;
+ # otherwise, return the current install env path name.
+ old_filename = os.path.join(self.stage.path, 'spack-build.env')
+ if os.path.exists(old_filename):
+ return old_filename
+ else:
+ return os.path.join(self.stage.path, _spack_build_envfile)
+
+ @property
+ def env_mods_path(self):
+ """
+ Return the build environment modifications file path associated with
+ staging.
+ """
+ return os.path.join(self.stage.path, _spack_build_envmodsfile)
+
+ @property
+ def metadata_dir(self):
+ """Return the install metadata directory."""
+ return spack.store.layout.metadata_path(self.spec)
+
+ @property
+ def install_env_path(self):
+ """
+ Return the build environment file path on successful installation.
+ """
+ # Backward compatibility: Return the name of an existing log path;
+ # otherwise, return the current install env path name.
+ old_filename = os.path.join(self.metadata_dir, 'build.env')
+ if os.path.exists(old_filename):
+ return old_filename
+ else:
+ return os.path.join(self.metadata_dir, _spack_build_envfile)
+
+ @property
+ def log_path(self):
+ """Return the build log file path associated with staging."""
+ # Backward compatibility: Return the name of an existing log path.
+ for filename in ['spack-build.out', 'spack-build.txt']:
+ old_log = os.path.join(self.stage.path, filename)
+ if os.path.exists(old_log):
+ return old_log
+
+ # Otherwise, return the current log path name.
+ return os.path.join(self.stage.path, _spack_build_logfile)
+
+ @property
+ def phase_log_files(self):
+ """Find sorted phase log files written to the staging directory"""
+ logs_dir = os.path.join(self.stage.path, "spack-build-*-out.txt")
+ log_files = glob.glob(logs_dir)
+ log_files.sort()
+ return log_files
+
+ @property
+ def install_log_path(self):
+ """Return the build log file path on successful installation."""
+ # Backward compatibility: Return the name of an existing install log.
+ for filename in ['build.out', 'build.txt']:
+ old_log = os.path.join(self.metadata_dir, filename)
+ if os.path.exists(old_log):
+ return old_log
+
+ # Otherwise, return the current install log path name.
+ return os.path.join(self.metadata_dir, _spack_build_logfile)
+
+ @property
+ def configure_args_path(self):
+ """Return the configure args file path associated with staging."""
+ return os.path.join(self.stage.path, _spack_configure_argsfile)
+
+ @property
+ def test_install_log_path(self):
+ """Return the install phase-time test log file path, if set."""
+ return getattr(self, 'test_log_file', None)
+
+ @property
+ def install_test_install_log_path(self):
+ """Return the install location for the install phase-time test log."""
+ return fsys.join_path(self.metadata_dir, _spack_install_test_log)
+
+ @property
+ def times_log_path(self):
+ """Return the times log json file."""
+ return os.path.join(self.metadata_dir, _spack_times_log)
+
+ @property
+ def install_configure_args_path(self):
+ """Return the configure args file path on successful installation."""
+ return os.path.join(self.metadata_dir, _spack_configure_argsfile)
+
+ @property
+ def install_test_root(self):
+ """Return the install test root directory."""
+ return os.path.join(self.metadata_dir, 'test')
+
+ @property
+ def installed(self):
+ msg = ('the "PackageBase.installed" property is deprecated and will be '
+ 'removed in Spack v0.19, use "Spec.installed" instead')
+ warnings.warn(msg)
+ return self.spec.installed
+
+ @property
+ def installed_upstream(self):
+ msg = ('the "PackageBase.installed_upstream" property is deprecated and will '
+ 'be removed in Spack v0.19, use "Spec.installed_upstream" instead')
+ warnings.warn(msg)
+ return self.spec.installed_upstream
+
+ def _make_fetcher(self):
+ # Construct a composite fetcher that always contains at least
+ # one element (the root package). In case there are resources
+ # associated with the package, append their fetcher to the
+ # composite.
+ root_fetcher = fs.for_package_version(self, self.version)
+ fetcher = fs.FetchStrategyComposite() # Composite fetcher
+ fetcher.append(root_fetcher) # Root fetcher is always present
+ resources = self._get_needed_resources()
+ for resource in resources:
+ fetcher.append(resource.fetcher)
+ return fetcher
+
+ @property
+ def fetcher(self):
+ if not self.spec.versions.concrete:
+ raise ValueError("Cannot retrieve fetcher for"
+ " package without concrete version.")
+ if not self._fetcher:
+ self._fetcher = self._make_fetcher()
+ return self._fetcher
+
+ @fetcher.setter
+ def fetcher(self, f):
+ self._fetcher = f
+
+ def dependencies_of_type(self, *deptypes):
+ """Get dependencies that can possibly have these deptypes.
+
+ This analyzes the package and determines which dependencies *can*
+ be a certain kind of dependency. Note that they may not *always*
+ be this kind of dependency, since dependencies can be optional,
+ so something may be a build dependency in one configuration and a
+ run dependency in another.
+ """
+ return dict(
+ (name, conds) for name, conds in self.dependencies.items()
+ if any(dt in self.dependencies[name][cond].type
+ for cond in conds for dt in deptypes))
+
+ @property
+ def extendee_spec(self):
+ """
+ Spec of the extendee of this package, or None if it is not an extension
+ """
+ if not self.extendees:
+ return None
+
+ deps = []
+
+ # If the extendee is in the spec's deps already, return that.
+ for dep in self.spec.traverse(deptype=('link', 'run')):
+ if dep.name in self.extendees:
+ deps.append(dep)
+
+ # TODO: allow more than one active extendee.
+ if deps:
+ assert len(deps) == 1
+ return deps[0]
+
+ # if the spec is concrete already, then it extends something
+ # that is an *optional* dependency, and the dep isn't there.
+ if self.spec._concrete:
+ return None
+ else:
+ # TODO: do something sane here with more than one extendee
+ # If it's not concrete, then return the spec from the
+ # extends() directive since that is all we know so far.
+ spec_str, kwargs = next(iter(self.extendees.items()))
+ return spack.spec.Spec(spec_str)
+
+ @property
+ def extendee_args(self):
+ """
+ Spec of the extendee of this package, or None if it is not an extension
+ """
+ if not self.extendees:
+ return None
+
+ # TODO: allow multiple extendees.
+ name = next(iter(self.extendees))
+ return self.extendees[name][1]
+
+ @property
+ def is_extension(self):
+ # if it is concrete, it's only an extension if it actually
+ # depends on the extendee.
+ if self.spec._concrete:
+ return self.extendee_spec is not None
+ else:
+ # If not, then it's an extension if it *could* be an extension
+ return bool(self.extendees)
+
+ def extends(self, spec):
+ '''
+ Returns True if this package extends the given spec.
+
+ If ``self.spec`` is concrete, this returns whether this package extends
+ the given spec.
+
+ If ``self.spec`` is not concrete, this returns whether this package may
+ extend the given spec.
+ '''
+ if spec.name not in self.extendees:
+ return False
+ s = self.extendee_spec
+ return s and spec.satisfies(s)
+
+ def is_activated(self, view):
+ """Return True if package is activated."""
+ if not self.is_extension:
+ raise ValueError(
+ "is_activated called on package that is not an extension.")
+ if self.extendee_spec.installed_upstream:
+ # If this extends an upstream package, it cannot be activated for
+ # it. This bypasses construction of the extension map, which can
+ # fail when run in the context of a downstream Spack instance.
+ return False
+ extensions_layout = view.extensions_layout
+ exts = extensions_layout.extension_map(self.extendee_spec)
+ return (self.name in exts) and (exts[self.name] == self.spec)
+
+ def provides(self, vpkg_name):
+ """
+ True if this package provides a virtual package with the specified name
+ """
+ return any(
+ any(self.spec.satisfies(c) for c in constraints)
+ for s, constraints in self.provided.items() if s.name == vpkg_name
+ )
+
+ @property
+ def virtuals_provided(self):
+ """
+ virtual packages provided by this package with its spec
+ """
+ return [vspec for vspec, constraints in self.provided.items()
+ if any(self.spec.satisfies(c) for c in constraints)]
+
+ @property
+ def prefix(self):
+ """Get the prefix into which this package should be installed."""
+ return self.spec.prefix
+
+ @property # type: ignore[misc]
+ @memoized
+ def compiler(self):
+ """Get the spack.compiler.Compiler object used to build this package"""
+ if not self.spec.concrete:
+ raise ValueError("Can only get a compiler for a concrete package.")
+
+ return spack.compilers.compiler_for_spec(self.spec.compiler,
+ self.spec.architecture)
+
+ def url_version(self, version):
+ """
+ Given a version, this returns a string that should be substituted
+ into the package's URL to download that version.
+
+ By default, this just returns the version string. Subclasses may need
+ to override this, e.g., for boost, which requires underscores in place
+ of dots in its download URLs.
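+
+ A sketch of such an override (``underscored`` is a property of
+ Spack's ``Version`` class):
+
+ .. code-block:: python
+
+     def url_version(self, version):
+         return version.underscored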
+ """
+ return str(version)
+
+ def remove_prefix(self):
+ """
+ Removes the prefix for a package along with any empty parent
+ directories
+ """
+ spack.store.layout.remove_install_directory(self.spec)
+
+ @property
+ def download_instr(self):
+ """
+ Defines the default manual download instructions. Packages can
+ override the property to provide more information.
+
+ Returns:
+ (str): default manual download instructions
+ """
+ required = ('Manual download is required for {0}. '
+ .format(self.spec.name) if self.manual_download else '')
+ return ('{0}Refer to {1} for download instructions.'
+ .format(required, self.spec.package.homepage))
+
+ def do_fetch(self, mirror_only=False):
+ """
+ Creates a stage directory and downloads the tarball for this package.
+ Working directory will be set to the stage directory.
+ """
+ if not self.has_code or self.spec.external:
+ tty.debug('No fetch required for {0}'.format(self.name))
+ return
+
+ checksum = spack.config.get('config:checksum')
+ fetch = self.stage.managed_by_spack
+ if checksum and fetch and (self.version not in self.versions) \
+ and (not self.version.is_commit):
+ tty.warn("There is no checksum on file to fetch %s safely." %
+ self.spec.cformat('{name}{@version}'))
+
+ # Ask the user whether to skip the checksum if we're
+ # interactive, but just fail if non-interactive.
+ ck_msg = "Add a checksum or use --no-checksum to skip this check."
+ ignore_checksum = False
+ if sys.stdout.isatty():
+ ignore_checksum = tty.get_yes_or_no(" Fetch anyway?",
+ default=False)
+ if ignore_checksum:
+ tty.debug('Fetching with no checksum. {0}'
+ .format(ck_msg))
+
+ if not ignore_checksum:
+ raise FetchError("Will not fetch %s" %
+ self.spec.format('{name}{@version}'), ck_msg)
+
+ deprecated = spack.config.get('config:deprecated')
+ if not deprecated and self.versions.get(
+ self.version, {}).get('deprecated', False):
+ tty.warn("{0} is deprecated and may be removed in a future Spack "
+ "release.".format(
+ self.spec.format('{name}{@version}')))
+
+ # Ask the user whether to install deprecated version if we're
+ # interactive, but just fail if non-interactive.
+ dp_msg = ("If you are willing to be a maintainer for this version "
+ "of the package, submit a PR to remove `deprecated=False"
+ "`, or use `--deprecated` to skip this check.")
+ ignore_deprecation = False
+ if sys.stdout.isatty():
+ ignore_deprecation = tty.get_yes_or_no(" Fetch anyway?",
+ default=False)
+
+ if ignore_deprecation:
+ tty.debug("Fetching deprecated version. {0}".format(
+ dp_msg))
+
+ if not ignore_deprecation:
+ raise FetchError("Will not fetch {0}".format(
+ self.spec.format('{name}{@version}')), dp_msg)
+
+ self.stage.create()
+ err_msg = None if not self.manual_download else self.download_instr
+ start_time = time.time()
+ self.stage.fetch(mirror_only, err_msg=err_msg)
+ self._fetch_time = time.time() - start_time
+
+ if checksum and self.version in self.versions:
+ self.stage.check()
+
+ self.stage.cache_local()
+
+ for patch in self.spec.patches:
+ patch.fetch()
+ if patch.stage:
+ patch.stage.cache_local()
+
+ def do_stage(self, mirror_only=False):
+ """Unpacks and expands the fetched tarball."""
+ # Always create the stage directory at this point. Why? A no-code
+ # package may want to use the installation process to install metadata.
+ self.stage.create()
+
+ # Fetch/expand any associated code.
+ if self.has_code:
+ self.do_fetch(mirror_only)
+ self.stage.expand_archive()
+
+ if not os.listdir(self.stage.path):
+ raise FetchError("Archive was empty for %s" % self.name)
+ else:
+ # Support for post-install hooks requires a stage.source_path
+ fsys.mkdirp(self.stage.source_path)
+
+ def do_patch(self):
+ """Applies patches if they haven't been applied already."""
+ if not self.spec.concrete:
+ raise ValueError("Can only patch concrete packages.")
+
+ # Kick off the stage first. This creates the stage.
+ self.do_stage()
+
+ # Package can add its own patch function.
+ has_patch_fun = hasattr(self, 'patch') and callable(self.patch)
+
+ # Get the patches from the spec (this is a shortcut for the MV-variant)
+ patches = self.spec.patches
+
+ # If there are no patches, note it.
+ if not patches and not has_patch_fun:
+ tty.msg('No patches needed for {0}'.format(self.name))
+ return
+
+ # Construct paths to special files in the archive dir used to
+ # keep track of whether patches were successfully applied.
+ archive_dir = self.stage.source_path
+ good_file = os.path.join(archive_dir, '.spack_patched')
+ no_patches_file = os.path.join(archive_dir, '.spack_no_patches')
+ bad_file = os.path.join(archive_dir, '.spack_patch_failed')
+
+ # If we encounter an archive that failed to patch, restage it
+ # so that we can apply all the patches again.
+ if os.path.isfile(bad_file):
+ tty.debug('Patching failed last time. Restaging.')
+ self.stage.restage()
+
+ # If this file exists, then we already applied all the patches.
+ if os.path.isfile(good_file):
+ tty.msg('Already patched {0}'.format(self.name))
+ return
+ elif os.path.isfile(no_patches_file):
+ tty.msg('No patches needed for {0}'.format(self.name))
+ return
+
+ # Apply all the patches for specs that match this one
+ patched = False
+ for patch in patches:
+ try:
+ with fsys.working_dir(self.stage.source_path):
+ patch.apply(self.stage)
+ tty.msg('Applied patch {0}'.format(patch.path_or_url))
+ patched = True
+ except spack.error.SpackError as e:
+ tty.debug(e)
+
+ # Touch bad file if anything goes wrong.
+ tty.msg('Patch %s failed.' % patch.path_or_url)
+ fsys.touch(bad_file)
+ raise
+
+ if has_patch_fun:
+ try:
+ with fsys.working_dir(self.stage.source_path):
+ self.patch()
+ tty.msg('Ran patch() for {0}'.format(self.name))
+ patched = True
+ except spack.multimethod.NoSuchMethodError:
+ # We are running a multimethod without a default case.
+ # If there's no default it means we don't need to patch.
+ if not patched:
+ # if we didn't apply a patch from a patch()
+ # directive, AND the patch function didn't apply, say
+ # no patches are needed. Otherwise, we already
+ # printed a message for each patch.
+ tty.msg('No patches needed for {0}'.format(self.name))
+ except spack.error.SpackError as e:
+ tty.debug(e)
+
+ # Touch bad file if anything goes wrong.
+ tty.msg('patch() function failed for {0}'.format(self.name))
+ fsys.touch(bad_file)
+ raise
+
+ # Get rid of any old failed file -- patches have either succeeded
+ # or are not needed. This is mostly defensive -- it's needed
+ # if the restage() method doesn't clean *everything* (e.g., for a repo)
+ if os.path.isfile(bad_file):
+ os.remove(bad_file)
+
+ # touch good or no patches file so that we skip next time.
+ if patched:
+ fsys.touch(good_file)
+ else:
+ fsys.touch(no_patches_file)
+
+ @classmethod
+ def all_patches(cls):
+ """Retrieve all patches associated with the package.
+
+ Retrieves patches on the package itself as well as patches on the
+ dependencies of the package."""
+ patches = []
+ for _, patch_list in cls.patches.items():
+ for patch in patch_list:
+ patches.append(patch)
+
+ pkg_deps = cls.dependencies
+ for dep_name in pkg_deps:
+ for _, dependency in pkg_deps[dep_name].items():
+ for _, patch_list in dependency.patches.items():
+ for patch in patch_list:
+ patches.append(patch)
+
+ return patches
+
+ def content_hash(self, content=None):
+ """Create a hash based on the artifacts and patches used to build this package.
+
+ This includes:
+ * source artifacts (tarballs, repositories) used to build;
+ * content hashes (``sha256``'s) of all patches applied by Spack; and
+ * canonicalized contents the ``package.py`` recipe used to build.
+
+ This hash is only included in Spack's DAG hash for concrete specs, but if it
+ happens to be called on a package with an abstract spec, only applicable (i.e.,
+ determinable) portions of the hash will be included.
+
+ """
+ # list of components to make up the hash
+ hash_content = []
+
+ # source artifacts/repositories
+ # TODO: resources
+ if self.spec.versions.concrete:
+ try:
+ source_id = fs.for_package_version(self, self.version).source_id()
+ except (fs.ExtrapolationError, fs.InvalidArgsError):
+ # ExtrapolationError happens if the package has no fetchers defined.
+ # InvalidArgsError happens when there are version directives with args,
+ # but none of them identifies an actual fetcher.
+ source_id = None
+
+ if not source_id:
+ # TODO? in cases where a digest or source_id isn't available,
+ # should this attempt to download the source and set one? This
+ # probably only happens for source repositories which are
+ # referenced by branch name rather than tag or commit ID.
+ env = spack.environment.active_environment()
+ from_local_sources = env and env.is_develop(self.spec)
+ if not self.spec.external and not from_local_sources:
+ message = 'Missing a source id for {s.name}@{s.version}'
+ tty.warn(message.format(s=self))
+ hash_content.append(''.encode('utf-8'))
+ else:
+ hash_content.append(source_id.encode('utf-8'))
+
+ # patch sha256's
+ # Only include these if they've been assigned by the concretizer.
+ # We check spec._patches_assigned instead of spec.concrete because
+ # we have to call package_hash *before* marking specs concrete
+ if self.spec._patches_assigned():
+ hash_content.extend(
+ ':'.join((p.sha256, str(p.level))).encode('utf-8')
+ for p in self.spec.patches
+ )
+
+ # package.py contents
+ hash_content.append(package_hash(self.spec, source=content).encode('utf-8'))
+
+ # put it all together and encode as base32
+ b32_hash = base64.b32encode(
+ hashlib.sha256(
+ bytes().join(sorted(hash_content))
+ ).digest()
+ ).lower()
+
+ # convert from bytes if running python 3
+ if sys.version_info[0] >= 3:
+ b32_hash = b32_hash.decode('utf-8')
+
+ return b32_hash
+
+ def _has_make_target(self, target):
+ """Checks to see if 'target' is a valid target in a Makefile.
+
+ Parameters:
+ target (str): the target to check for
+
+ Returns:
+ bool: True if 'target' is found, else False
+ """
+ # Prevent altering LC_ALL for 'make' outside this function
+ make = copy.deepcopy(inspect.getmodule(self).make)
+
+ # Use English locale for missing target message comparison
+ make.add_default_env('LC_ALL', 'C')
+
+ # Check if we have a Makefile
+ for makefile in ['GNUmakefile', 'Makefile', 'makefile']:
+ if os.path.exists(makefile):
+ break
+ else:
+ tty.debug('No Makefile found in the build directory')
+ return False
+
+ # Check if 'target' is a valid target.
+ #
+ # `make -n target` performs a "dry run". It prints the commands that
+ # would be run but doesn't actually run them. If the target does not
+ # exist, you will see one of the following error messages:
+ #
+ # GNU Make:
+ # make: *** No rule to make target `test'. Stop.
+ # *** No rule to make target 'test'. Stop.
+ #
+ # BSD Make:
+ # make: don't know how to make test. Stop
+ missing_target_msgs = [
+ "No rule to make target `{0}'. Stop.",
+ "No rule to make target '{0}'. Stop.",
+ "don't know how to make {0}. Stop",
+ ]
+
+ kwargs = {
+ 'fail_on_error': False,
+ 'output': os.devnull,
+ 'error': str,
+ }
+
+ stderr = make('-n', target, **kwargs)
+
+ for missing_target_msg in missing_target_msgs:
+ if missing_target_msg.format(target) in stderr:
+ tty.debug("Target '{0}' not found in {1}"
+ .format(target, makefile))
+ return False
+
+ return True
+
+ def _if_make_target_execute(self, target, *args, **kwargs):
+ """Runs ``make target`` if 'target' is a valid target in the Makefile.
+
+ Parameters:
+ target (str): the target to potentially execute
+ """
+ if self._has_make_target(target):
+ # Execute target
+ inspect.getmodule(self).make(target, *args, **kwargs)
+
+ def _has_ninja_target(self, target):
+ """Checks to see if 'target' is a valid target in a Ninja build script.
+
+ Parameters:
+ target (str): the target to check for
+
+ Returns:
+ bool: True if 'target' is found, else False
+ """
+ ninja = inspect.getmodule(self).ninja
+
+ # Check if we have a Ninja build script
+ if not os.path.exists('build.ninja'):
+ tty.debug('No Ninja build script found in the build directory')
+ return False
+
+ # Get a list of all targets in the Ninja build script
+ # https://ninja-build.org/manual.html#_extra_tools
+ all_targets = ninja('-t', 'targets', 'all', output=str).split('\n')
+
+ # Check if 'target' is a valid target
+ matches = [line for line in all_targets
+ if line.startswith(target + ':')]
+
+ if not matches:
+ tty.debug("Target '{0}' not found in build.ninja"
+ .format(target))
+ return False
+
+ return True
+
+ def _if_ninja_target_execute(self, target, *args, **kwargs):
+ """Runs ``ninja target`` if 'target' is a valid target in the Ninja
+ build script.
+
+ Parameters:
+ target (str): the target to potentially execute
+ """
+ if self._has_ninja_target(target):
+ # Execute target
+ inspect.getmodule(self).ninja(target, *args, **kwargs)
+
+ def _get_needed_resources(self):
+ resources = []
+ # Select the resources that are needed for this build
+ if self.spec.concrete:
+ for when_spec, resource_list in self.resources.items():
+ if when_spec in self.spec:
+ resources.extend(resource_list)
+ else:
+ for when_spec, resource_list in self.resources.items():
+ # Note that variant checking is always strict for specs where
+ # the name is not specified. But with strict variant checking,
+ # only variants mentioned in 'other' are checked. Here we only
+ # want to make sure that no constraints in when_spec
+ # conflict with the spec, so we need to invoke
+ # when_spec.satisfies(self.spec) vs.
+ # self.spec.satisfies(when_spec)
+ if when_spec.satisfies(self.spec, strict=False):
+ resources.extend(resource_list)
+ # Sort the resources by the length of their destination paths. Since
+ # any nested resource must contain another resource's name in its
+ # path, sorting by length stages parent resources before nested ones.
+ resources = sorted(resources, key=lambda res: len(res.destination))
+ return resources
+
+ def _resource_stage(self, resource):
+ pieces = ['resource', resource.name, self.spec.dag_hash()]
+ resource_stage_folder = '-'.join(pieces)
+ return resource_stage_folder
+
+ @contextlib.contextmanager
+ def _stage_and_write_lock(self):
+ """Prefix lock nested in a stage."""
+ with self.stage:
+ with spack.store.db.prefix_write_lock(self.spec):
+ yield
+
+ def do_install(self, **kwargs):
+ """Called by commands to install a package and or its dependencies.
+
+ Package implementations should override install() to describe
+ their build process.
+
+ Args:
+ cache_only (bool): Fail if binary package unavailable.
+ dirty (bool): Don't clean the build environment before installing.
+ explicit (bool): True if package was explicitly installed, False
+ if package was implicitly installed (as a dependency).
+ fail_fast (bool): Fail if any dependency fails to install;
+ otherwise, the default is to install as many dependencies as
+ possible (i.e., best effort installation).
+ fake (bool): Don't really build; install fake stub files instead.
+ force (bool): Install again, even if already installed.
+ install_deps (bool): Install dependencies before installing this
+ package
+ install_source (bool): By default, source is not installed, but
+ for debugging it might be useful to keep it around.
+ keep_prefix (bool): Keep install prefix on failure. By default,
+ destroys it.
+ keep_stage (bool): By default, stage is destroyed only if there
+ are no exceptions during build. Set to True to keep the stage
+ even with exceptions.
+ restage (bool): Force spack to restage the package source.
+ skip_patch (bool): Skip patch stage of build if True.
+ stop_before (InstallPhase): stop execution before this
+ installation phase (or None)
+ stop_at (InstallPhase): last installation phase to be executed
+ (or None)
+ tests (bool or list or set): False to run no tests, True to test
+ all packages, or a list of package names to run tests for some
+ use_cache (bool): Install from binary package, if available.
+ verbose (bool): Display verbose build output (by default,
+ suppresses it)
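+
+ A typical call from command code looks like the following (the
+ arguments shown are a sketch):
+
+ .. code-block:: python
+
+     pkg.do_install(explicit=True, verbose=False)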
+ """
+ # Non-transitive dev specs need to keep the dev stage and be built from
+ # source every time. Transitive ones just need to be built from source.
+ dev_path_var = self.spec.variants.get('dev_path', None)
+ if dev_path_var:
+ kwargs['keep_stage'] = True
+
+ builder = PackageInstaller([(self, kwargs)])
+ builder.install()
+
+ def cache_extra_test_sources(self, srcs):
+ """Copy relative source paths to the corresponding install test subdir
+
+ This method is intended as an optional install test setup helper for
+ grabbing source files/directories during the installation process and
+ copying them to the installation test subdirectory for subsequent use
+ during install testing.
+
+ Args:
+ srcs (str or list): relative path(s) for files and/or
+ subdirectories located in the staged source path that are to
+ be copied to the corresponding location(s) under the install
+ testing directory.
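+
+ For example, a package might stash example inputs at install time
+ (the paths are illustrative):
+
+ .. code-block:: python
+
+     @run_after('install')
+     def cache_test_sources(self):
+         self.cache_extra_test_sources(['examples', 'tests/data'])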
+ """
+ paths = [srcs] if isinstance(srcs, six.string_types) else srcs
+
+ for path in paths:
+ src_path = os.path.join(self.stage.source_path, path)
+ dest_path = os.path.join(self.install_test_root, path)
+ if os.path.isdir(src_path):
+ fsys.install_tree(src_path, dest_path)
+ else:
+ fsys.mkdirp(os.path.dirname(dest_path))
+ fsys.copy(src_path, dest_path)
+
+ @contextlib.contextmanager
+ def _setup_test(self, verbose, externals):
+ self.test_failures = []
+ if self.test_suite:
+ self.test_log_file = self.test_suite.log_file_for_spec(self.spec)
+ self.tested_file = self.test_suite.tested_file_for_spec(self.spec)
+ pkg_id = self.test_suite.test_pkg_id(self.spec)
+ else:
+ self.test_log_file = fsys.join_path(
+ self.stage.path, _spack_install_test_log)
+ pkg_id = self.spec.format('{name}-{version}-{hash:7}')
+ fsys.touch(self.test_log_file) # Otherwise log_parse complains
+
+ with tty.log.log_output(self.test_log_file, verbose) as logger:
+ with logger.force_echo():
+ tty.msg('Testing package {0}'.format(pkg_id))
+
+ # use debug print levels for log file to record commands
+ old_debug = tty.is_debug()
+ tty.set_debug(True)
+
+ try:
+ yield logger
+ finally:
+ # reset debug level
+ tty.set_debug(old_debug)
+
+ def do_test(self, dirty=False, externals=False):
+ if self.test_requires_compiler:
+ compilers = spack.compilers.compilers_for_spec(
+ self.spec.compiler, arch_spec=self.spec.architecture)
+ if not compilers:
+ tty.error('Skipping tests for package %s\n' %
+ self.spec.format('{name}-{version}-{hash:7}') +
+ 'Package test requires missing compiler %s' %
+ self.spec.compiler)
+ return
+
+ kwargs = {
+ 'dirty': dirty, 'fake': False, 'context': 'test',
+ 'externals': externals
+ }
+ if tty.is_verbose():
+ kwargs['verbose'] = True
+ spack.build_environment.start_build_process(
+ self, test_process, kwargs)
+
+ def test(self):
+ # Defer tests to virtual and concrete packages
+ pass
+
+ def run_test(self, exe, options=[], expected=[], status=0,
+ installed=False, purpose='', skip_missing=False,
+ work_dir=None):
+ """Run the test and confirm the expected results are obtained
+
+ Log any failures and continue, they will be re-raised later
+
+ Args:
+ exe (str): the name of the executable
+ options (str or list): list of options to pass to the runner
+ expected (str or list): list of expected output strings.
+ Each string is a regex expected to match part of the output.
+ status (int or list): possible passing status values
+ with 0 meaning the test is expected to succeed
+ installed (bool): if ``True``, the executable must be in the
+ install prefix
+ purpose (str): message to display before running test
+ skip_missing (bool): skip the test if the executable is not
+ in the install prefix bin directory or the provided work_dir
+ work_dir (str or None): path to the smoke test directory
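+
+ For example, a package's ``test()`` method might call (a sketch;
+ the executable name and regex are placeholders):
+
+ .. code-block:: python
+
+     def test(self):
+         self.run_test('foo', options=['--version'],
+                       expected=[r'version \d+\.\d+'],
+                       installed=True,
+                       purpose='check foo version')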
+ """
+ wdir = '.' if work_dir is None else work_dir
+ with fsys.working_dir(wdir, create=True):
+ try:
+ runner = which(exe)
+ if runner is None and skip_missing:
+ return
+ assert runner is not None, \
+ "Failed to find executable '{0}'".format(exe)
+
+ self._run_test_helper(
+ runner, options, expected, status, installed, purpose)
+ print("PASSED")
+ return True
+ except BaseException as e:
+ # print a summary of the error to the log file
+ # so that cdash and junit reporters know about it
+ exc_type, _, tb = sys.exc_info()
+ print('FAILED: {0}'.format(e))
+ import traceback
+
+ # remove the current call frame to exclude the extract_stack
+ # call from the error
+ stack = traceback.extract_stack()[:-1]
+
+ # Package files have a line added at import time, so we re-read
+ # the file to make line numbers match. We subtract two from the
+ # reported line number: one because the import machinery inflates
+ # line numbers by the line it adds, and one to convert the 1-based
+ # line number to a 0-based index into the file's lines.
+ for i, entry in enumerate(stack):
+ filename, lineno, function, text = entry
+ if spack.repo.is_package_file(filename):
+ with open(filename, 'r') as f:
+ lines = f.readlines()
+ new_lineno = lineno - 2
+ text = lines[new_lineno]
+ stack[i] = (filename, new_lineno, function, text)
+
+ # Format the stack to print and print it
+ out = traceback.format_list(stack)
+ for line in out:
+ print(line.rstrip('\n'))
+
+ if exc_type is spack.util.executable.ProcessError:
+ out = six.StringIO()
+ spack.build_environment.write_log_summary(
+ out, 'test', self.test_log_file, last=1)
+ m = out.getvalue()
+ else:
+ # We're below the package context, so get context from
+ # stack instead of from traceback.
+ # The traceback is truncated here, so we can't use it to
+ # traverse the stack.
+ m = '\n'.join(
+ spack.build_environment.get_package_context(tb)
+ )
+
+ exc = e # e is deleted after this block
+
+ # If we fail fast, raise another error
+ if spack.config.get('config:fail_fast', False):
+ raise TestFailure([(exc, m)])
+ else:
+ self.test_failures.append((exc, m))
+ return False
+
+ def _run_test_helper(self, runner, options, expected, status, installed,
+ purpose):
+ status = [status] if isinstance(status, six.integer_types) else status
+ expected = [expected] if isinstance(expected, six.string_types) else \
+ expected
+ options = [options] if isinstance(options, six.string_types) else \
+ options
+
+ if purpose:
+ tty.msg(purpose)
+ else:
+ tty.debug('test: {0}: expect command status in {1}'
+ .format(runner.name, status))
+
+ if installed:
+ msg = "Executable '{0}' expected in prefix".format(runner.name)
+ msg += ", found in {0} instead".format(runner.path)
+ assert runner.path.startswith(self.spec.prefix), msg
+
+ try:
+ output = runner(*options, output=str.split, error=str.split)
+
+ assert 0 in status, \
+ 'Expected {0} execution to fail'.format(runner.name)
+ except ProcessError as err:
+ output = str(err)
+ match = re.search(r'exited with status ([0-9]+)', output)
+ if not (match and int(match.group(1)) in status):
+ raise
+
+ for check in expected:
+ cmd = ' '.join([runner.name] + options)
+ msg = "Expected '{0}' to match output of `{1}`".format(check, cmd)
+ msg += '\n\nOutput: {0}'.format(output)
+ assert re.search(check, output), msg
+
+ def unit_test_check(self):
+ """Hook for unit tests to assert things about package internals.
+
+ Unit tests can override this function to perform checks after
+ ``Package.install`` and all post-install hooks run, but before
+ the database is updated.
+
+ The overridden function may indicate that the install procedure
+ should terminate early (before updating the database) by
+ returning ``False`` (or any value such that ``bool(result)`` is
+ ``False``).
+
+ Return:
+ (bool): ``True`` to continue, ``False`` to skip ``install()``
+ """
+ return True
+
+ def sanity_check_prefix(self):
+ """This function checks whether install succeeded."""
+
+ def check_paths(path_list, filetype, predicate):
+ if isinstance(path_list, six.string_types):
+ path_list = [path_list]
+
+ for path in path_list:
+ abs_path = os.path.join(self.prefix, path)
+ if not predicate(abs_path):
+ raise InstallError(
+ "Install failed for %s. No such %s in prefix: %s" %
+ (self.name, filetype, path))
+
+ check_paths(self.sanity_check_is_file, 'file', os.path.isfile)
+ check_paths(self.sanity_check_is_dir, 'directory', os.path.isdir)
+
+ installed = set(os.listdir(self.prefix))
+ installed.difference_update(
+ spack.store.layout.hidden_file_regexes)
+ if not installed:
+ raise InstallError(
+ "Install failed for %s. Nothing was installed!" % self.name)
+
+ def apply_macos_rpath_fixups(self):
+ """On Darwin, make installed libraries more easily relocatable.
+
+ Some build systems (handrolled, autotools, makefiles) can set their own
+ rpaths that are duplicated by spack's compiler wrapper. This fixup
+ interrogates, and postprocesses if necessary, all libraries installed
+ by the code.
+
+ It should be added as a @run_after to packaging systems (or individual
+ packages) that do not install relocatable libraries by default.
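+
+ One way to attach it, mirroring how some build-system base classes
+ do:
+
+ .. code-block:: python
+
+     run_after('install')(PackageBase.apply_macos_rpath_fixups)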
+ """
+ if 'platform=darwin' not in self.spec:
+ return
+
+ from spack.relocate import fixup_macos_rpaths
+ fixup_macos_rpaths(self.spec)
+
+ @property
+ def build_log_path(self):
+ """
+ Return the expected (or current) build log file path. The path points
+ to the staging build log until the software is successfully installed,
+ after which it points to the file in the installation directory.
+ """
+ return self.install_log_path if self.spec.installed else self.log_path
+
+ @classmethod
+ def inject_flags(cls, name, flags):
+ # type: (Type, str, Iterable[str]) -> FLAG_HANDLER_RETURN_TYPE
+ """
+ flag_handler that injects all flags through the compiler wrapper.
+ """
+ return flags, None, None
+
+ @classmethod
+ def env_flags(cls, name, flags):
+ # type: (Type, str, Iterable[str]) -> FLAG_HANDLER_RETURN_TYPE
+ """
+ flag_handler that adds all flags to canonical environment variables.
+ """
+ return None, flags, None
+
+ @classmethod
+ def build_system_flags(cls, name, flags):
+ # type: (Type, str, Iterable[str]) -> FLAG_HANDLER_RETURN_TYPE
+ """
+ flag_handler that passes flags to the build system arguments. Any
+ package using `build_system_flags` must also implement
+ `flags_to_build_system_args`, or derive from a class that
+ implements it. Currently, AutotoolsPackage and CMakePackage
+ implement it.
+ """
+ return None, None, flags
+
+ def setup_build_environment(self, env):
+ """Sets up the build environment for a package.
+
+ This method will be called before the current package prefix exists in
+ Spack's store.
+
+ Args:
+ env (spack.util.environment.EnvironmentModifications): environment
+ modifications to be applied when the package is built. Package authors
+ can call methods on it to alter the build environment.
+ """
+ pass
+
+ def setup_run_environment(self, env):
+ """Sets up the run environment for a package.
+
+ Args:
+ env (spack.util.environment.EnvironmentModifications): environment
+ modifications to be applied when the package is run. Package authors
+ can call methods on it to alter the run environment.
+ """
+ pass
+
+ def setup_dependent_build_environment(self, env, dependent_spec):
+ """Sets up the build environment of packages that depend on this one.
+
+ This is similar to ``setup_build_environment``, but it is used to
+ modify the build environments of packages that *depend* on this one.
+
+ This gives packages like Python and others that follow the extension
+ model a way to implement common environment or compile-time settings
+ for dependencies.
+
+ This method will be called before the dependent package prefix exists
+ in Spack's store.
+
+ Examples:
+ 1. Installing python modules generally requires ``PYTHONPATH``
+ to point to the ``lib/pythonX.Y/site-packages`` directory in the
+ module's install prefix. This method could be used to set that
+ variable.
+
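+ A hedged sketch of such an override (the ``site_packages_dir``
+ helper is an assumption of this illustration):
+
+ .. code-block:: python
+
+     def setup_dependent_build_environment(self, env, dependent_spec):
+         # prepend this package's site-packages dir for the dependent
+         env.prepend_path('PYTHONPATH', self.site_packages_dir)
+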
+ Args:
+ env (spack.util.environment.EnvironmentModifications): environment
+ modifications to be applied when the dependent package is built.
+ Package authors can call methods on it to alter the build environment.
+
+ dependent_spec (spack.spec.Spec): the spec of the dependent package
+ about to be built. This allows the extendee (self) to query
+ the dependent's state. Note that *this* package's spec is
+ available as ``self.spec``
+ """
+ pass
+
+ def setup_dependent_run_environment(self, env, dependent_spec):
+ """Sets up the run environment of packages that depend on this one.
+
+ This is similar to ``setup_run_environment``, but it is used to
+ modify the run environments of packages that *depend* on this one.
+
+ This gives packages like Python and others that follow the extension
+ model a way to implement common environment or run-time settings
+ for dependencies.
+
+ Args:
+ env (spack.util.environment.EnvironmentModifications): environment
+ modifications to be applied when the dependent package is run.
+ Package authors can call methods on it to alter the build environment.
+
+ dependent_spec (spack.spec.Spec): The spec of the dependent package
+ about to be run. This allows the extendee (self) to query
+ the dependent's state. Note that *this* package's spec is
+ available as ``self.spec``
+ """
+ pass
+
+ def setup_dependent_package(self, module, dependent_spec):
+ """Set up Python module-scope variables for dependent packages.
+
+ Called before the install() method of dependents.
+
+ Default implementation does nothing, but this can be
+ overridden by an extendable package to set up the module of
+ its extensions. This is useful if there are some common steps
+ to installing all extensions for a certain package.
+
+ Examples:
+
+ 1. Extensions often need to invoke the ``python`` interpreter
+ from the Python installation being extended. This routine
+ can put a ``python()`` Executable object in the module scope
+ for the extension package to simplify extension installs.
+
+ 2. MPI compilers could set some variables in the dependent's
+ scope that point to ``mpicc``, ``mpicxx``, etc., allowing
+ them to be called by common name regardless of which MPI is used.
+
+ 3. BLAS/LAPACK implementations can set some variables
+ indicating the path to their libraries, since these
+ paths differ by BLAS/LAPACK implementation.
+
+ Args:
+ module (spack.package_base.PackageBase.module): The Python ``module``
+ object of the dependent package. Packages can use this to set
+ module-scope variables for the dependent to use.
+
+ dependent_spec (spack.spec.Spec): The spec of the dependent package
+ about to be built. This allows the extendee (self) to
+ query the dependent's state. Note that *this*
+ package's spec is available as ``self.spec``.
+ """
+ pass
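+
+    # A sketch of example 1 above, assuming the extendee's interpreter
+    # lives at ``self.prefix.bin.python``:
+    #
+    #     def setup_dependent_package(self, module, dependent_spec):
+    #         module.python = spack.util.executable.Executable(
+    #             self.prefix.bin.python)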
+
+ _flag_handler = None # type: Optional[FLAG_HANDLER_TYPE]
+
+ @property
+ def flag_handler(self):
+ # type: () -> FLAG_HANDLER_TYPE
+ if self._flag_handler is None:
+ self._flag_handler = PackageBase.inject_flags
+ return self._flag_handler
+
+ @flag_handler.setter
+ def flag_handler(self, var):
+ # type: (FLAG_HANDLER_TYPE) -> None
+ self._flag_handler = var
+
+ # The flag handler method is called for each of the allowed compiler flags.
+ # It returns a triple of inject_flags, env_flags, build_system_flags.
+ # The flags returned as inject_flags are injected through the spack
+ # compiler wrappers.
+ # The flags returned as env_flags are passed to the build system through
+ # the environment variables of the same name.
+ # The flags returned as build_system_flags are passed to the build system
+ # package subclass to be turned into the appropriate part of the standard
+ # arguments. This is implemented for build system classes where
+ # appropriate and will otherwise raise a NotImplementedError.
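+    #
+    # For instance, a package could route C/C++ flags to the build system
+    # and inject everything else through the wrappers (an illustrative
+    # sketch, not a real package):
+    #
+    #     class Foo(CMakePackage):
+    #         def flag_handler(self, name, flags):
+    #             if name in ('cflags', 'cxxflags'):
+    #                 return (None, None, flags)  # build_system_flags
+    #             return (flags, None, None)      # inject_flags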
+
+ def flags_to_build_system_args(self, flags):
+ # Takes flags as a dict name: list of values
+ if any(v for v in flags.values()):
+ msg = 'The {0} build system'.format(self.__class__.__name__)
+ msg += ' cannot take command line arguments for compiler flags'
+ raise NotImplementedError(msg)
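+
+    # Build system classes that support ``build_system_flags`` override
+    # this hook; roughly, in CMake style (an illustrative sketch, not the
+    # exact CMakePackage implementation):
+    #
+    #     def flags_to_build_system_args(self, flags):
+    #         args = []
+    #         for lang, pre in (('cflags', 'C'), ('cxxflags', 'CXX')):
+    #             if flags.get(lang):
+    #                 args.append('-DCMAKE_{0}_FLAGS={1}'.format(
+    #                     pre, ' '.join(flags[lang])))
+    #         self.cmake_flag_args = args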
+
+ @staticmethod
+    def uninstall_by_spec(spec, force=False, deprecator=None):
+        """Uninstall the package identified by ``spec``: run uninstall
+        hooks, remove the install prefix (unless the spec is external),
+        and delete or deprecate its database entry."""
+ if not os.path.isdir(spec.prefix):
+ # prefix may not exist, but DB may be inconsistent. Try to fix by
+ # removing, but omit hooks.
+ specs = spack.store.db.query(spec, installed=True)
+ if specs:
+ if deprecator:
+ spack.store.db.deprecate(specs[0], deprecator)
+ tty.debug('Deprecating stale DB entry for {0}'
+ .format(spec.short_spec))
+ else:
+ spack.store.db.remove(specs[0])
+ tty.debug('Removed stale DB entry for {0}'
+ .format(spec.short_spec))
+ return
+ else:
+ raise InstallError(str(spec) + " is not installed.")
+
+ if not force:
+ dependents = spack.store.db.installed_relatives(
+ spec,
+ direction='parents',
+ transitive=True,
+ deptype=("link", "run"),
+ )
+ if dependents:
+ raise PackageStillNeededError(spec, dependents)
+
+ # Try to get the package for the spec
+ try:
+ pkg = spec.package
+ except spack.repo.UnknownEntityError:
+ pkg = None
+
+ # Pre-uninstall hook runs first.
+ with spack.store.db.prefix_write_lock(spec):
+
+ if pkg is not None:
+ try:
+ spack.hooks.pre_uninstall(spec)
+ except Exception as error:
+ if force:
+ error_msg = (
+ "One or more pre_uninstall hooks have failed"
+ " for {0}, but Spack is continuing with the"
+ " uninstall".format(str(spec)))
+ if isinstance(error, spack.error.SpackError):
+ error_msg += (
+ "\n\nError message: {0}".format(str(error)))
+ tty.warn(error_msg)
+ # Note that if the uninstall succeeds then we won't be
+ # seeing this error again and won't have another chance
+ # to run the hook.
+ else:
+ raise
+
+ # Uninstalling in Spack only requires removing the prefix.
+ if not spec.external:
+ msg = 'Deleting package prefix [{0}]'
+ tty.debug(msg.format(spec.short_spec))
+ # test if spec is already deprecated, not whether we want to
+ # deprecate it now
+ deprecated = bool(spack.store.db.deprecator(spec))
+ spack.store.layout.remove_install_directory(spec, deprecated)
+ # Delete DB entry
+ if deprecator:
+ msg = 'deprecating DB entry [{0}] in favor of [{1}]'
+ tty.debug(msg.format(spec.short_spec, deprecator.short_spec))
+ spack.store.db.deprecate(spec, deprecator)
+ else:
+ msg = 'Deleting DB entry [{0}]'
+ tty.debug(msg.format(spec.short_spec))
+ spack.store.db.remove(spec)
+
+ if pkg is not None:
+ try:
+ spack.hooks.post_uninstall(spec)
+ except Exception:
+ # If there is a failure here, this is our only chance to do
+ # something about it: at this point the Spec has been removed
+ # from the DB and prefix, so the post-uninstallation hooks
+ # will not have another chance to run.
+ error_msg = (
+ "One or more post-uninstallation hooks failed for"
+ " {0}, but the prefix has been removed (if it is not"
+ " external).".format(str(spec)))
+ tb_msg = traceback.format_exc()
+ error_msg += "\n\nThe error:\n\n{0}".format(tb_msg)
+ tty.warn(error_msg)
+
+ tty.msg('Successfully uninstalled {0}'.format(spec.short_spec))
+
+ def do_uninstall(self, force=False):
+ """Uninstall this package by spec."""
+ # delegate to instance-less method.
+ Package.uninstall_by_spec(self.spec, force)
+
+ def do_deprecate(self, deprecator, link_fn):
+ """Deprecate this package in favor of deprecator spec"""
+ spec = self.spec
+
+ # Check whether package to deprecate has active extensions
+ if self.extendable:
+ view = spack.filesystem_view.YamlFilesystemView(spec.prefix,
+ spack.store.layout)
+ active_exts = view.extensions_layout.extension_map(spec).values()
+ if active_exts:
+ short = spec.format('{name}/{hash:7}')
+ m = "Spec %s has active extensions\n" % short
+ for active in active_exts:
+ m += ' %s\n' % active.format('{name}/{hash:7}')
+ m += "Deactivate extensions before deprecating %s" % short
+ tty.die(m)
+
+ # Check whether package to deprecate is an active extension
+ if self.is_extension:
+ extendee = self.extendee_spec
+ view = spack.filesystem_view.YamlFilesystemView(extendee.prefix,
+ spack.store.layout)
+
+ if self.is_activated(view):
+ short = spec.format('{name}/{hash:7}')
+ short_ext = extendee.format('{name}/{hash:7}')
+ msg = "Spec %s is an active extension of %s\n" % (short,
+ short_ext)
+ msg += "Deactivate %s to be able to deprecate it" % short
+ tty.die(msg)
+
+ # Install deprecator if it isn't installed already
+ if not spack.store.db.query(deprecator):
+ deprecator.package.do_install()
+
+ old_deprecator = spack.store.db.deprecator(spec)
+ if old_deprecator:
+            # Find this spec's yaml file from its old deprecation
+ self_yaml = spack.store.layout.deprecated_file_path(spec,
+ old_deprecator)
+ else:
+ self_yaml = spack.store.layout.spec_file_path(spec)
+
+ # copy spec metadata to "deprecated" dir of deprecator
+ depr_yaml = spack.store.layout.deprecated_file_path(spec,
+ deprecator)
+ fsys.mkdirp(os.path.dirname(depr_yaml))
+ shutil.copy2(self_yaml, depr_yaml)
+
+ # Any specs deprecated in favor of this spec are re-deprecated in
+ # favor of its new deprecator
+ for deprecated in spack.store.db.specs_deprecated_by(spec):
+ deprecated.package.do_deprecate(deprecator, link_fn)
+
+ # Now that we've handled metadata, uninstall and replace with link
+ Package.uninstall_by_spec(spec, force=True, deprecator=deprecator)
+ link_fn(deprecator.prefix, spec.prefix)
+
+ def _check_extendable(self):
+ if not self.extendable:
+ raise ValueError("Package %s is not extendable!" % self.name)
+
+ def _sanity_check_extension(self):
+ if not self.is_extension:
+ raise ActivationError("This package is not an extension.")
+
+ extendee_package = self.extendee_spec.package
+ extendee_package._check_extendable()
+
+ if not self.extendee_spec.installed:
+ raise ActivationError(
+ "Can only (de)activate extensions for installed packages.")
+ if not self.spec.installed:
+ raise ActivationError("Extensions must first be installed.")
+ if self.extendee_spec.name not in self.extendees:
+ raise ActivationError("%s does not extend %s!" %
+                                  (self.name, self.extendee_spec.name))
+
+ def do_activate(self, view=None, with_dependencies=True, verbose=True):
+ """Called on an extension to invoke the extendee's activate method.
+
+ Commands should call this routine, and should not call
+ activate() directly.
+ """
+ if verbose:
+ tty.msg('Activating extension {0} for {1}'.format(
+ self.spec.cshort_spec, self.extendee_spec.cshort_spec))
+
+ self._sanity_check_extension()
+ if not view:
+ view = YamlFilesystemView(
+ self.extendee_spec.prefix, spack.store.layout)
+
+ extensions_layout = view.extensions_layout
+
+ try:
+ extensions_layout.check_extension_conflict(
+ self.extendee_spec, self.spec)
+ except spack.directory_layout.ExtensionAlreadyInstalledError as e:
+ # already installed, let caller know
+ tty.msg(e.message)
+ return
+
+ # Activate any package dependencies that are also extensions.
+ if with_dependencies:
+ for spec in self.dependency_activations():
+ if not spec.package.is_activated(view):
+ spec.package.do_activate(
+ view, with_dependencies=with_dependencies,
+ verbose=verbose)
+
+ self.extendee_spec.package.activate(
+ self, view, **self.extendee_args)
+
+ extensions_layout.add_extension(self.extendee_spec, self.spec)
+
+ if verbose:
+ tty.debug('Activated extension {0} for {1}'.format(
+ self.spec.cshort_spec, self.extendee_spec.cshort_spec))
+
+ def dependency_activations(self):
+ return (spec for spec in self.spec.traverse(root=False, deptype='run')
+ if spec.package.extends(self.extendee_spec))
+
+ def activate(self, extension, view, **kwargs):
+ """
+ Add the extension to the specified view.
+
+ Package authors can override this function to maintain some
+ centralized state related to the set of activated extensions
+ for a package.
+
+        Spack internals (commands, hooks, etc.) should call the
+        do_activate() method so that proper checks are always executed.
+ """
+ view.merge(extension.spec, ignore=kwargs.get('ignore', None))
+
+ def do_deactivate(self, view=None, **kwargs):
+ """Remove this extension package from the specified view. Called
+ on the extension to invoke extendee's deactivate() method.
+
+ `remove_dependents=True` deactivates extensions depending on this
+ package instead of raising an error.
+ """
+ self._sanity_check_extension()
+ force = kwargs.get('force', False)
+ verbose = kwargs.get('verbose', True)
+ remove_dependents = kwargs.get('remove_dependents', False)
+
+ if verbose:
+ tty.msg('Deactivating extension {0} for {1}'.format(
+ self.spec.cshort_spec, self.extendee_spec.cshort_spec))
+
+ if not view:
+ view = YamlFilesystemView(
+ self.extendee_spec.prefix, spack.store.layout)
+ extensions_layout = view.extensions_layout
+
+ # Allow a force deactivate to happen. This can unlink
+ # spurious files if something was corrupted.
+ if not force:
+ extensions_layout.check_activated(
+ self.extendee_spec, self.spec)
+
+ activated = extensions_layout.extension_map(
+ self.extendee_spec)
+ for name, aspec in activated.items():
+ if aspec == self.spec:
+ continue
+ for dep in aspec.traverse(deptype='run'):
+ if self.spec == dep:
+ if remove_dependents:
+ aspec.package.do_deactivate(**kwargs)
+ else:
+ msg = ('Cannot deactivate {0} because {1} is '
+ 'activated and depends on it')
+ raise ActivationError(msg.format(
+ self.spec.cshort_spec, aspec.cshort_spec))
+
+ self.extendee_spec.package.deactivate(
+ self, view, **self.extendee_args)
+
+ # redundant activation check -- makes SURE the spec is not
+ # still activated even if something was wrong above.
+ if self.is_activated(view):
+ extensions_layout.remove_extension(
+ self.extendee_spec, self.spec)
+
+ if verbose:
+ tty.debug('Deactivated extension {0} for {1}'.format(
+ self.spec.cshort_spec, self.extendee_spec.cshort_spec))
+
+ def deactivate(self, extension, view, **kwargs):
+ """
+ Remove all extension files from the specified view.
+
+ Package authors can override this method to support other
+ extension mechanisms. Spack internals (commands, hooks, etc.)
+        should call the do_deactivate() method so that proper checks are
+ always executed.
+ """
+ view.unmerge(extension.spec, ignore=kwargs.get('ignore', None))
+
+ def view(self):
+ """Create a view with the prefix of this package as the root.
+ Extensions added to this view will modify the installation prefix of
+ this package.
+ """
+ return YamlFilesystemView(self.prefix, spack.store.layout)
+
+ def do_restage(self):
+ """Reverts expanded/checked out source to a pristine state."""
+ self.stage.restage()
+
+ def do_clean(self):
+ """Removes the package's build stage and source tarball."""
+ for patch in self.spec.patches:
+ patch.clean()
+
+ self.stage.destroy()
+
+ def format_doc(self, **kwargs):
+ """Wrap doc string at 72 characters and format nicely"""
+ indent = kwargs.get('indent', 0)
+
+ if not self.__doc__:
+ return ""
+
+ doc = re.sub(r'\s+', ' ', self.__doc__)
+ lines = textwrap.wrap(doc, 72)
+ results = six.StringIO()
+ for line in lines:
+ results.write((" " * indent) + line + "\n")
+ return results.getvalue()
+
+ @property
+ def all_urls(self):
+ """A list of all URLs in a package.
+
+ Check both class-level and version-specific URLs.
+
+ Returns:
+ list: a list of URLs
+ """
+ urls = []
+ if hasattr(self, 'url') and self.url:
+ urls.append(self.url)
+
+ # fetch from first entry in urls to save time
+ if hasattr(self, 'urls') and self.urls:
+ urls.append(self.urls[0])
+
+ for args in self.versions.values():
+ if 'url' in args:
+ urls.append(args['url'])
+ return urls
+
+ def fetch_remote_versions(self, concurrency=128):
+ """Find remote versions of this package.
+
+ Uses ``list_url`` and any other URLs listed in the package file.
+
+        Args:
+            concurrency (int): maximum number of concurrent requests
+
+        Returns:
+ dict: a dictionary mapping versions to URLs
+ """
+ if not self.all_urls:
+ return {}
+
+ try:
+ return spack.util.web.find_versions_of_archive(
+ self.all_urls,
+ self.list_url,
+ self.list_depth,
+ concurrency,
+ reference_package=self,
+ )
+ except spack.util.web.NoNetworkConnectionError as e:
+ tty.die("Package.fetch_versions couldn't connect to:", e.url,
+ e.message)
+
+ @property
+ def rpath(self):
+ """Get the rpath this package links with, as a list of paths."""
+ rpaths = [self.prefix.lib, self.prefix.lib64]
+ deps = self.spec.dependencies(deptype='link')
+ rpaths.extend(d.prefix.lib for d in deps
+ if os.path.isdir(d.prefix.lib))
+ rpaths.extend(d.prefix.lib64 for d in deps
+ if os.path.isdir(d.prefix.lib64))
+ return rpaths
+
+ @property
+ def rpath_args(self):
+ """
+ Get the rpath args as a string, with -Wl,-rpath, for each element
+ """
+ return " ".join("-Wl,-rpath,%s" % p for p in self.rpath)
+
+ def _run_test_callbacks(self, method_names, callback_type='install'):
+ """Tries to call all of the listed methods, returning immediately
+ if the list is None."""
+ if method_names is None:
+ return
+
+ fail_fast = spack.config.get('config:fail_fast', False)
+
+ with self._setup_test(verbose=False, externals=False) as logger:
+ # Report running each of the methods in the build log
+ print_test_message(
+ logger, 'Running {0}-time tests'.format(callback_type), True)
+
+ for name in method_names:
+ try:
+ fn = getattr(self, name)
+
+                    msg = ('RUN-TESTS: {0}-time tests [{1}]'
+                           .format(callback_type, name))
+                    print_test_message(logger, msg, True)
+
+ fn()
+ except AttributeError as e:
+                    msg = ('RUN-TESTS: method not implemented [{0}]'
+                           .format(name))
+                    print_test_message(logger, msg, True)
+
+ self.test_failures.append((e, msg))
+ if fail_fast:
+ break
+
+ # Raise any collected failures here
+ if self.test_failures:
+ raise TestFailure(self.test_failures)
+
+ @on_package_attributes(run_tests=True)
+ def _run_default_build_time_test_callbacks(self):
+ """Tries to call all the methods that are listed in the attribute
+ ``build_time_test_callbacks`` if ``self.run_tests is True``.
+ """
+ self._run_test_callbacks(self.build_time_test_callbacks, 'build')
+
+ @on_package_attributes(run_tests=True)
+ def _run_default_install_time_test_callbacks(self):
+ """Tries to call all the methods that are listed in the attribute
+ ``install_time_test_callbacks`` if ``self.run_tests is True``.
+ """
+ self._run_test_callbacks(self.install_time_test_callbacks, 'install')
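+
+    # Build system base classes opt in by naming their callbacks; e.g.,
+    # MakefilePackage follows this pattern (a sketch of the convention):
+    #
+    #     build_time_test_callbacks = ['check']
+    #     install_time_test_callbacks = ['installcheck']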
+
+
+def has_test_method(pkg):
+ """Determine if the package defines its own stand-alone test method.
+
+ Args:
+        pkg (type): the package class being checked
+
+ Returns:
+ (bool): ``True`` if the package overrides the default method; else
+ ``False``
+ """
+ if not inspect.isclass(pkg):
+ tty.die('{0}: is not a class, it is {1}'.format(pkg, type(pkg)))
+
+ return (
+ (issubclass(pkg, PackageBase) and pkg.test != PackageBase.test) or
+ (isinstance(pkg, PackageBase) and pkg.test.__func__ != PackageBase.test)
+ )
+
+
+def print_test_message(logger, msg, verbose):
+ if verbose:
+ with logger.force_echo():
+ tty.msg(msg)
+ else:
+ tty.msg(msg)
+
+
+def test_process(pkg, kwargs):
+    """Run stand-alone tests for ``pkg`` and the virtuals it provides."""
+ verbose = kwargs.get('verbose', False)
+ externals = kwargs.get('externals', False)
+
+ with pkg._setup_test(verbose, externals) as logger:
+ if pkg.spec.external and not externals:
+ print_test_message(
+ logger, 'Skipped tests for external package', verbose)
+ return
+
+        # Run test methods from the package and all virtuals it provides;
+        # virtuals have to be deduped by name.
+        v_names = list(set(vspec.name for vspec in pkg.virtuals_provided))
+
+ # hack for compilers that are not dependencies (yet)
+ # TODO: this all eventually goes away
+ c_names = ('gcc', 'intel', 'intel-parallel-studio', 'pgi')
+ if pkg.name in c_names:
+ v_names.extend(['c', 'cxx', 'fortran'])
+ if pkg.spec.satisfies('llvm+clang'):
+ v_names.extend(['c', 'cxx'])
+
+ test_specs = [pkg.spec] + [spack.spec.Spec(v_name)
+ for v_name in sorted(v_names)]
+
+ ran_actual_test_function = False
+ try:
+ with fsys.working_dir(pkg.test_suite.test_dir_for_spec(pkg.spec)):
+ for spec in test_specs:
+ pkg.test_suite.current_test_spec = spec
+ # Fail gracefully if a virtual has no package/tests
+ try:
+ spec_pkg = spec.package
+ except spack.repo.UnknownPackageError:
+ continue
+
+ # copy installed test sources cache into test cache dir
+ if spec.concrete:
+ cache_source = spec_pkg.install_test_root
+ cache_dir = pkg.test_suite.current_test_cache_dir
+ if (os.path.isdir(cache_source) and
+ not os.path.exists(cache_dir)):
+ fsys.install_tree(cache_source, cache_dir)
+
+ # copy test data into test data dir
+ data_source = Prefix(spec_pkg.package_dir).test
+ data_dir = pkg.test_suite.current_test_data_dir
+ if (os.path.isdir(data_source) and
+ not os.path.exists(data_dir)):
+ # We assume data dir is used read-only
+ # maybe enforce this later
+ shutil.copytree(data_source, data_dir)
+
+ # grab the function for each method so we can call
+ # it with the package
+ test_fn = spec_pkg.__class__.test
+ if not isinstance(test_fn, types.FunctionType):
+ test_fn = test_fn.__func__
+
+ # Skip any test methods consisting solely of 'pass'
+ # since they do not contribute to package testing.
+ source = (inspect.getsource(test_fn)).splitlines()[1:]
+ lines = (ln.strip() for ln in source)
+ statements = [ln for ln in lines if not ln.startswith('#')]
+                    if statements and statements[0] == 'pass':
+ continue
+
+ # Run the tests
+ ran_actual_test_function = True
+ context = logger.force_echo if verbose else nullcontext
+ with context():
+ test_fn(pkg)
+
+ # If fail-fast was on, we error out above
+ # If we collect errors, raise them in batch here
+ if pkg.test_failures:
+ raise TestFailure(pkg.test_failures)
+
+ finally:
+        # Flag the package as having been tested (i.e., it ran one or more
+        # non-pass-only methods).
+ if ran_actual_test_function:
+ fsys.touch(pkg.tested_file)
+ else:
+ print_test_message(logger, 'No tests to run', verbose)
+
+
+inject_flags = PackageBase.inject_flags
+env_flags = PackageBase.env_flags
+build_system_flags = PackageBase.build_system_flags
+
+
+class BundlePackage(PackageBase):
+ """General purpose bundle, or no-code, package class."""
+ #: There are no phases by default but the property is required to support
+ #: post-install hooks (e.g., for module generation).
+ phases = [] # type: List[str]
+ #: This attribute is used in UI queries that require to know which
+ #: build-system class we are using
+ build_system_class = 'BundlePackage'
+
+ #: Bundle packages do not have associated source or binary code.
+ has_code = False
+
+
+class Package(PackageBase):
+ """General purpose class with a single ``install``
+ phase that needs to be coded by packagers.
+ """
+ #: The one and only phase
+ phases = ['install']
+ #: This attribute is used in UI queries that require to know which
+ #: build-system class we are using
+ build_system_class = 'Package'
+ # This will be used as a registration decorator in user
+ # packages, if need be
+ run_after('install')(PackageBase.sanity_check_prefix)
+ # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
+ run_after('install')(PackageBase.apply_macos_rpath_fixups)
+
+
+def install_dependency_symlinks(pkg, spec, prefix):
+ """
+ Execute a dummy install and flatten dependencies.
+
+ This routine can be used in a ``package.py`` definition by setting
+ ``install = install_dependency_symlinks``.
+
+    This feature comes in handy for creating a common location for the
+    installation of third-party libraries.
+ """
+ flatten_dependencies(spec, prefix)
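+
+# A hypothetical bundle package using the helper above might read
+# (an illustrative sketch only):
+#
+#     class ThirdPartyLibs(Package):
+#         install = install_dependency_symlinks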
+
+
+def use_cray_compiler_names():
+ """Compiler names for builds that rely on cray compiler names."""
+ os.environ['CC'] = 'cc'
+ os.environ['CXX'] = 'CC'
+ os.environ['FC'] = 'ftn'
+ os.environ['F77'] = 'ftn'
+
+
+def flatten_dependencies(spec, flat_dir):
+ """Make each dependency of spec present in dir via symlink."""
+ for dep in spec.traverse(root=False):
+ name = dep.name
+
+ dep_path = spack.store.layout.path_for_spec(dep)
+ dep_files = LinkTree(dep_path)
+
+ os.mkdir(flat_dir + '/' + name)
+
+ conflict = dep_files.find_conflict(flat_dir + '/' + name)
+ if conflict:
+ raise DependencyConflictError(conflict)
+
+ dep_files.merge(flat_dir + '/' + name)
+
+
+def possible_dependencies(*pkg_or_spec, **kwargs):
+ """Get the possible dependencies of a number of packages.
+
+ See ``PackageBase.possible_dependencies`` for details.
+ """
+ packages = []
+ for pos in pkg_or_spec:
+ if isinstance(pos, PackageMeta):
+ packages.append(pos)
+ continue
+
+ if not isinstance(pos, spack.spec.Spec):
+ pos = spack.spec.Spec(pos)
+
+ if spack.repo.path.is_virtual(pos.name):
+ packages.extend(
+ p.package_class
+ for p in spack.repo.path.providers_for(pos.name)
+ )
+            continue
+
+        packages.append(pos.package_class)
+
+ visited = {}
+ for pkg in packages:
+ pkg.possible_dependencies(visited=visited, **kwargs)
+
+ return visited
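+
+# Usage sketch: expand the possible dependency graph of one or more
+# package names, classes, or specs, e.g.:
+#
+#     deps = possible_dependencies('mpileaks', transitive=True)
+#     # -> {pkg_name: set(dependency names), ...}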
+
+
+class FetchError(spack.error.SpackError):
+ """Raised when something goes wrong during fetch."""
+
+ def __init__(self, message, long_msg=None):
+ super(FetchError, self).__init__(message, long_msg)
+
+
+class PackageStillNeededError(InstallError):
+ """Raised when package is still needed by another on uninstall."""
+ def __init__(self, spec, dependents):
+ super(PackageStillNeededError, self).__init__("Cannot uninstall %s" %
+ spec)
+ self.spec = spec
+ self.dependents = dependents
+
+
+class PackageError(spack.error.SpackError):
+ """Raised when something is wrong with a package definition."""
+ def __init__(self, message, long_msg=None):
+ super(PackageError, self).__init__(message, long_msg)
+
+
+class PackageVersionError(PackageError):
+ """Raised when a version URL cannot automatically be determined."""
+ def __init__(self, version):
+ super(PackageVersionError, self).__init__(
+ "Cannot determine a URL automatically for version %s" % version,
+ "Please provide a url for this version in the package.py file.")
+
+
+class NoURLError(PackageError):
+ """Raised when someone tries to build a URL for a package with no URLs."""
+
+ def __init__(self, cls):
+ super(NoURLError, self).__init__(
+ "Package %s has no version with a URL." % cls.__name__)
+
+
+class InvalidPackageOpError(PackageError):
+ """Raised when someone tries perform an invalid operation on a package."""
+
+
+class ExtensionError(PackageError):
+ """Superclass for all errors having to do with extension packages."""
+
+
+class ActivationError(ExtensionError):
+ """Raised when there are problems activating an extension."""
+ def __init__(self, msg, long_msg=None):
+ super(ActivationError, self).__init__(msg, long_msg)
+
+
+class DependencyConflictError(spack.error.SpackError):
+ """Raised when the dependencies cannot be flattened as asked for."""
+ def __init__(self, conflict):
+ super(DependencyConflictError, self).__init__(
+ "%s conflicts with another file in the flattened directory." % (
+ conflict))
diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py
index 33f9fa57eb..237e85ad75 100644
--- a/lib/spack/spack/patch.py
+++ b/lib/spack/spack/patch.py
@@ -356,7 +356,7 @@ class PatchCache(object):
Arguments:
sha256 (str): sha256 hash to look up
- pkg (spack.package.Package): Package object to get patch for.
+ pkg (spack.package_base.Package): Package object to get patch for.
We build patch objects lazily because building them requires that
we have information about the package's location in its repo.
diff --git a/lib/spack/spack/pkgkit.py b/lib/spack/spack/pkgkit.py
deleted file mode 100644
index c7fbfe1af8..0000000000
--- a/lib/spack/spack/pkgkit.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-# flake8: noqa: F401
-"""pkgkit is a set of useful build tools and directives for packages.
-
-Everything in this module is automatically imported into Spack package files.
-"""
-import llnl.util.filesystem
-from llnl.util.filesystem import *
-
-import spack.directives
-import spack.util.executable
-from spack.build_systems.aspell_dict import AspellDictPackage
-from spack.build_systems.autotools import AutotoolsPackage
-from spack.build_systems.cached_cmake import (
- CachedCMakePackage,
- cmake_cache_option,
- cmake_cache_path,
- cmake_cache_string,
-)
-from spack.build_systems.cmake import CMakePackage
-from spack.build_systems.cuda import CudaPackage
-from spack.build_systems.gnu import GNUMirrorPackage
-from spack.build_systems.intel import IntelPackage
-from spack.build_systems.lua import LuaPackage
-from spack.build_systems.makefile import MakefilePackage
-from spack.build_systems.maven import MavenPackage
-from spack.build_systems.meson import MesonPackage
-from spack.build_systems.octave import OctavePackage
-from spack.build_systems.oneapi import (
- IntelOneApiLibraryPackage,
- IntelOneApiPackage,
- IntelOneApiStaticLibraryList,
-)
-from spack.build_systems.perl import PerlPackage
-from spack.build_systems.python import PythonPackage
-from spack.build_systems.qmake import QMakePackage
-from spack.build_systems.r import RPackage
-from spack.build_systems.racket import RacketPackage
-from spack.build_systems.rocm import ROCmPackage
-from spack.build_systems.ruby import RubyPackage
-from spack.build_systems.scons import SConsPackage
-from spack.build_systems.sip import SIPPackage
-from spack.build_systems.sourceforge import SourceforgePackage
-from spack.build_systems.sourceware import SourcewarePackage
-from spack.build_systems.waf import WafPackage
-from spack.build_systems.xorg import XorgPackage
-from spack.dependency import all_deptypes
-from spack.directives import *
-from spack.install_test import get_escaped_text_output
-from spack.installer import (
- ExternalPackageError,
- InstallError,
- InstallLockError,
- UpstreamPackageError,
-)
-from spack.mixins import filter_compiler_wrappers
-from spack.multimethod import when
-from spack.package import (
- BundlePackage,
- DependencyConflictError,
- Package,
- build_system_flags,
- env_flags,
- flatten_dependencies,
- inject_flags,
- install_dependency_symlinks,
- on_package_attributes,
- run_after,
- run_before,
-)
-from spack.spec import InvalidSpecDetected, Spec
-from spack.util.executable import *
-from spack.variant import (
- any_combination_of,
- auto_or_any_combination_of,
- conditional,
- disjoint_sets,
-)
-from spack.version import Version, ver
diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py
index 50f0b8a9fb..82ac03ecdc 100644
--- a/lib/spack/spack/repo.py
+++ b/lib/spack/spack/repo.py
@@ -202,17 +202,11 @@ class RepoLoader(_PrependFileLoader):
"""Loads a Python module associated with a package in specific repository"""
#: Code in ``_package_prepend`` is prepended to imported packages.
#:
- #: Spack packages were originally expected to call `from spack import *`
- #: themselves, but it became difficult to manage and imports in the Spack
- #: core were polluted by package symbols this way. To
- #: solve this, the top-level ``spack`` package contains very few symbols
- #: of its own, and importing ``*`` is essentially a no-op. The common
- #: routines and directives that packages need are now in ``spack.pkgkit``,
- #: and the import system forces packages to automatically include
- #: this. This way, old packages that call ``from spack import *`` will
- #: continue to work without modification, but it's no longer required.
+ #: Spack packages are expected to call `from spack.package import *`
+ #: themselves, but we are allowing a deprecation period before breaking
+ #: external repos that don't do this yet.
_package_prepend = ('from __future__ import absolute_import;'
- 'from spack.pkgkit import *')
+ 'from spack.package import *')
def __init__(self, fullname, repo, package_name):
self.repo = repo
@@ -450,10 +444,10 @@ def is_package_file(filename):
# Package files are named `package.py` and are not in lib/spack/spack
# We have to remove the file extension because it can be .py and can be
# .pyc depending on context, and can differ between the files
- import spack.package # break cycle
+ import spack.package_base # break cycle
filename_noext = os.path.splitext(filename)[0]
packagebase_filename_noext = os.path.splitext(
- inspect.getfile(spack.package.PackageBase))[0]
+ inspect.getfile(spack.package_base.PackageBase))[0]
return (filename_noext != packagebase_filename_noext and
os.path.basename(filename_noext) == 'package')
diff --git a/lib/spack/spack/report.py b/lib/spack/spack/report.py
index c8975a8f77..b67f2e71cc 100644
--- a/lib/spack/spack/report.py
+++ b/lib/spack/spack/report.py
@@ -15,7 +15,7 @@ import llnl.util.lang
import spack.build_environment
import spack.fetch_strategy
-import spack.package
+import spack.package_base
from spack.install_test import TestSuite
from spack.reporter import Reporter
from spack.reporters.cdash import CDash
@@ -131,7 +131,7 @@ class InfoCollector(object):
"""
@functools.wraps(do_fn)
def wrapper(instance, *args, **kwargs):
- if isinstance(instance, spack.package.PackageBase):
+ if isinstance(instance, spack.package_base.PackageBase):
pkg = instance
elif hasattr(args[0], 'pkg'):
pkg = args[0].pkg
diff --git a/lib/spack/spack/reporters/cdash.py b/lib/spack/spack/reporters/cdash.py
index d947becde8..388884039d 100644
--- a/lib/spack/spack/reporters/cdash.py
+++ b/lib/spack/spack/reporters/cdash.py
@@ -22,7 +22,7 @@ from llnl.util.filesystem import working_dir
import spack.build_environment
import spack.fetch_strategy
-import spack.package
+import spack.package_base
from spack.error import SpackError
from spack.reporter import Reporter
from spack.util.crypto import checksum
diff --git a/lib/spack/spack/reporters/junit.py b/lib/spack/spack/reporters/junit.py
index 8ed299fe23..fa19bdd2bd 100644
--- a/lib/spack/spack/reporters/junit.py
+++ b/lib/spack/spack/reporters/junit.py
@@ -7,7 +7,7 @@ import posixpath
import spack.build_environment
import spack.fetch_strategy
-import spack.package
+import spack.package_base
from spack.reporter import Reporter
__all__ = ['JUnit']
diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py
index 1b3eb8b1a8..bcdb929fdc 100644
--- a/lib/spack/spack/solver/asp.py
+++ b/lib/spack/spack/solver/asp.py
@@ -41,7 +41,7 @@ import spack.dependency
import spack.directives
import spack.environment as ev
import spack.error
-import spack.package
+import spack.package_base
import spack.package_prefs
import spack.platforms
import spack.repo
@@ -1822,7 +1822,7 @@ class SpackSolverSetup(object):
self.possible_virtuals = set(
x.name for x in specs if x.virtual
)
- possible = spack.package.possible_dependencies(
+ possible = spack.package_base.possible_dependencies(
*specs,
virtuals=self.possible_virtuals,
deptype=spack.dependency.all_deptypes
diff --git a/lib/spack/spack/test/build_systems.py b/lib/spack/spack/test/build_systems.py
index 5c1f3c293c..304fa4c7bd 100644
--- a/lib/spack/spack/test/build_systems.py
+++ b/lib/spack/spack/test/build_systems.py
@@ -128,7 +128,7 @@ def test_cmake_bad_generator(config, mock_packages):
s.concretize()
pkg = spack.repo.get(s)
pkg.generator = 'Yellow Sticky Notes'
- with pytest.raises(spack.package.InstallError):
+ with pytest.raises(spack.package_base.InstallError):
get_std_cmake_args(pkg)
diff --git a/lib/spack/spack/test/cmd/analyze.py b/lib/spack/spack/test/cmd/analyze.py
index 77f26ccb23..17f5331804 100644
--- a/lib/spack/spack/test/cmd/analyze.py
+++ b/lib/spack/spack/test/cmd/analyze.py
@@ -10,7 +10,7 @@ import pytest
import spack.cmd.install
import spack.config
-import spack.package
+import spack.package_base
import spack.util.spack_json as sjson
from spack.main import SpackCommand
from spack.spec import Spec
diff --git a/lib/spack/spack/test/cmd/clean.py b/lib/spack/spack/test/cmd/clean.py
index fb0bb87ed0..82c8833a0c 100644
--- a/lib/spack/spack/test/cmd/clean.py
+++ b/lib/spack/spack/test/cmd/clean.py
@@ -9,7 +9,7 @@ import pytest
import spack.caches
import spack.main
-import spack.package
+import spack.package_base
import spack.stage
clean = spack.main.SpackCommand('clean')
@@ -31,7 +31,7 @@ def mock_calls_for_clean(monkeypatch):
def __call__(self, *args, **kwargs):
counts[self.name] += 1
- monkeypatch.setattr(spack.package.PackageBase, 'do_clean',
+ monkeypatch.setattr(spack.package_base.PackageBase, 'do_clean',
Counter('package'))
monkeypatch.setattr(spack.stage, 'purge', Counter('stages'))
monkeypatch.setattr(
diff --git a/lib/spack/spack/test/cmd/install.py b/lib/spack/spack/test/cmd/install.py
index 4b9f578c34..f9194f664d 100644
--- a/lib/spack/spack/test/cmd/install.py
+++ b/lib/spack/spack/test/cmd/install.py
@@ -20,7 +20,7 @@ import spack.compilers as compilers
import spack.config
import spack.environment as ev
import spack.hash_types as ht
-import spack.package
+import spack.package_base
import spack.util.executable
from spack.error import SpackError
from spack.main import SpackCommand
@@ -66,7 +66,7 @@ def test_install_package_and_dependency(
def test_install_runtests_notests(monkeypatch, mock_packages, install_mockery):
def check(pkg):
assert not pkg.run_tests
- monkeypatch.setattr(spack.package.PackageBase, 'unit_test_check', check)
+ monkeypatch.setattr(spack.package_base.PackageBase, 'unit_test_check', check)
install('-v', 'dttop')
@@ -75,7 +75,7 @@ def test_install_runtests_root(monkeypatch, mock_packages, install_mockery):
def check(pkg):
assert pkg.run_tests == (pkg.name == 'dttop')
- monkeypatch.setattr(spack.package.PackageBase, 'unit_test_check', check)
+ monkeypatch.setattr(spack.package_base.PackageBase, 'unit_test_check', check)
install('--test=root', 'dttop')
@@ -84,7 +84,7 @@ def test_install_runtests_all(monkeypatch, mock_packages, install_mockery):
def check(pkg):
assert pkg.run_tests
- monkeypatch.setattr(spack.package.PackageBase, 'unit_test_check', check)
+ monkeypatch.setattr(spack.package_base.PackageBase, 'unit_test_check', check)
install('--test=all', 'a')
@@ -257,7 +257,7 @@ def test_install_commit(
"""
repo_path, filename, commits = mock_git_version_info
- monkeypatch.setattr(spack.package.PackageBase,
+ monkeypatch.setattr(spack.package_base.PackageBase,
'git', 'file://%s' % repo_path,
raising=False)
diff --git a/lib/spack/spack/test/cmd/pkg.py b/lib/spack/spack/test/cmd/pkg.py
index b868c62c66..e43fca0535 100644
--- a/lib/spack/spack/test/cmd/pkg.py
+++ b/lib/spack/spack/test/cmd/pkg.py
@@ -22,7 +22,7 @@ pytestmark = pytest.mark.skipif(not which('git'),
#: new fake package template
pkg_template = '''\
-from spack import *
+from spack.package import *
class {name}(Package):
homepage = "http://www.example.com"
diff --git a/lib/spack/spack/test/cmd/stage.py b/lib/spack/spack/test/cmd/stage.py
index b7120a5a95..5c586a11b0 100644
--- a/lib/spack/spack/test/cmd/stage.py
+++ b/lib/spack/spack/test/cmd/stage.py
@@ -28,7 +28,7 @@ def test_stage_spec(monkeypatch):
def fake_stage(pkg, mirror_only=False):
expected.remove(pkg.name)
- monkeypatch.setattr(spack.package.PackageBase, 'do_stage', fake_stage)
+ monkeypatch.setattr(spack.package_base.PackageBase, 'do_stage', fake_stage)
stage('trivial-install-test-package', 'mpileaks')
@@ -42,7 +42,7 @@ def check_stage_path(monkeypatch, tmpdir):
def fake_stage(pkg, mirror_only=False):
assert pkg.path == expected_path
- monkeypatch.setattr(spack.package.PackageBase, 'do_stage', fake_stage)
+ monkeypatch.setattr(spack.package_base.PackageBase, 'do_stage', fake_stage)
return expected_path
@@ -69,7 +69,7 @@ def test_stage_with_env_outside_env(mutable_mock_env_path, monkeypatch):
assert pkg.name == 'trivial-install-test-package'
assert pkg.path is None
- monkeypatch.setattr(spack.package.PackageBase, 'do_stage', fake_stage)
+ monkeypatch.setattr(spack.package_base.PackageBase, 'do_stage', fake_stage)
e = ev.create('test')
e.add('mpileaks')
@@ -87,7 +87,7 @@ def test_stage_with_env_inside_env(mutable_mock_env_path, monkeypatch):
assert pkg.name == 'mpileaks'
assert pkg.version == Version('100.100')
- monkeypatch.setattr(spack.package.PackageBase, 'do_stage', fake_stage)
+ monkeypatch.setattr(spack.package_base.PackageBase, 'do_stage', fake_stage)
e = ev.create('test')
e.add('mpileaks@100.100')
@@ -115,7 +115,7 @@ def test_stage_full_env(mutable_mock_env_path, monkeypatch):
def fake_stage(pkg, mirror_only=False):
expected.remove(pkg.name)
- monkeypatch.setattr(spack.package.PackageBase, 'do_stage', fake_stage)
+ monkeypatch.setattr(spack.package_base.PackageBase, 'do_stage', fake_stage)
with e:
stage()
diff --git a/lib/spack/spack/test/cmd/style.py b/lib/spack/spack/test/cmd/style.py
index 4269f0bd28..8b70a791dc 100644
--- a/lib/spack/spack/test/cmd/style.py
+++ b/lib/spack/spack/test/cmd/style.py
@@ -76,7 +76,9 @@ def flake8_package_with_errors(scope="function"):
package = FileFilter(filename)
package.filter("state = 'unmodified'", "state = 'modified'", string=True)
package.filter(
- "from spack import *", "from spack import *\nimport os", string=True
+ "from spack.package import *",
+ "from spack.package import *\nimport os",
+ string=True
)
yield filename
finally:
@@ -273,17 +275,17 @@ def test_style(flake8_package, tmpdir):
relative = os.path.relpath(flake8_package)
# no args
- output = style()
+ output = style(fail_on_error=False)
assert relative in output
assert "spack style checks were clean" in output
# one specific arg
- output = style(flake8_package)
+ output = style(flake8_package, fail_on_error=False)
assert relative in output
assert "spack style checks were clean" in output
# specific file that isn't changed
- output = style(__file__)
+ output = style(__file__, fail_on_error=False)
assert relative not in output
assert __file__ in output
assert "spack style checks were clean" in output
diff --git a/lib/spack/spack/test/cmd/test.py b/lib/spack/spack/test/cmd/test.py
index 0278ca344c..0555dc6ca1 100644
--- a/lib/spack/spack/test/cmd/test.py
+++ b/lib/spack/spack/test/cmd/test.py
@@ -13,7 +13,7 @@ from llnl.util.filesystem import copy_tree
import spack.cmd.install
import spack.config
-import spack.package
+import spack.package_base
import spack.paths
import spack.store
from spack.main import SpackCommand
@@ -239,7 +239,7 @@ def test_test_list(
reason="Not supported on Windows (yet)")
def test_has_test_method_fails(capsys):
with pytest.raises(SystemExit):
- spack.package.has_test_method('printing-package')
+ spack.package_base.has_test_method('printing-package')
captured = capsys.readouterr()[1]
assert 'is not a class' in captured
diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py
index 7298fc804f..089a073e09 100644
--- a/lib/spack/spack/test/conftest.py
+++ b/lib/spack/spack/test/conftest.py
@@ -35,7 +35,7 @@ import spack.config
import spack.database
import spack.directory_layout
import spack.environment as ev
-import spack.package
+import spack.package_base
import spack.package_prefs
import spack.paths
import spack.platforms
@@ -532,7 +532,7 @@ def _pkg_install_fn(pkg, spec, prefix):
@pytest.fixture
def mock_pkg_install(monkeypatch):
- monkeypatch.setattr(spack.package.PackageBase, 'install',
+ monkeypatch.setattr(spack.package_base.PackageBase, 'install',
_pkg_install_fn, raising=False)
@@ -934,7 +934,7 @@ def mock_fetch(mock_archive, monkeypatch):
mock_fetcher.append(URLFetchStrategy(mock_archive.url))
monkeypatch.setattr(
- spack.package.PackageBase, 'fetcher', mock_fetcher)
+ spack.package_base.PackageBase, 'fetcher', mock_fetcher)
class MockLayout(object):
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
index 0315857f1d..e61a46cbc5 100644
--- a/lib/spack/spack/test/database.py
+++ b/lib/spack/spack/test/database.py
@@ -29,7 +29,7 @@ import llnl.util.lock as lk
from llnl.util.tty.colify import colify
import spack.database
-import spack.package
+import spack.package_base
import spack.repo
import spack.spec
import spack.store
@@ -792,7 +792,7 @@ def test_uninstall_by_spec(mutable_database):
with mutable_database.write_transaction():
for spec in mutable_database.query():
if spec.installed:
- spack.package.PackageBase.uninstall_by_spec(spec, force=True)
+ spack.package_base.PackageBase.uninstall_by_spec(spec, force=True)
else:
mutable_database.remove(spec)
assert len(mutable_database.query()) == 0
diff --git a/lib/spack/spack/test/flag_handlers.py b/lib/spack/spack/test/flag_handlers.py
index f40b8ea14a..97a0ec171c 100644
--- a/lib/spack/spack/test/flag_handlers.py
+++ b/lib/spack/spack/test/flag_handlers.py
@@ -10,7 +10,7 @@ import pytest
import spack.build_environment
import spack.repo
import spack.spec
-from spack.pkgkit import build_system_flags, env_flags, inject_flags
+from spack.package import build_system_flags, env_flags, inject_flags
@pytest.fixture()
diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py
index 599087ac78..42c68fd6bd 100644
--- a/lib/spack/spack/test/install.py
+++ b/lib/spack/spack/test/install.py
@@ -15,7 +15,7 @@ import spack.patch
import spack.repo
import spack.store
import spack.util.spack_json as sjson
-from spack.package import (
+from spack.package_base import (
InstallError,
PackageBase,
PackageStillNeededError,
@@ -238,7 +238,7 @@ def test_flatten_deps(
assert dependency_name not in os.listdir(pkg.prefix)
# Flatten the dependencies and ensure the dependency directory is there.
- spack.package.flatten_dependencies(spec, pkg.prefix)
+ spack.package_base.flatten_dependencies(spec, pkg.prefix)
dependency_dir = os.path.join(pkg.prefix, dependency_name)
assert os.path.isdir(dependency_dir)
@@ -322,7 +322,7 @@ def test_partial_install_keep_prefix(install_mockery, mock_fetch, monkeypatch):
# If remove_prefix is called at any point in this test, that is an
# error
pkg.succeed = False # make the build fail
- monkeypatch.setattr(spack.package.Package, 'remove_prefix', mock_remove_prefix)
+ monkeypatch.setattr(spack.package_base.Package, 'remove_prefix', mock_remove_prefix)
with pytest.raises(spack.build_environment.ChildError):
pkg.do_install(keep_prefix=True)
assert os.path.exists(pkg.prefix)
@@ -340,7 +340,7 @@ def test_partial_install_keep_prefix(install_mockery, mock_fetch, monkeypatch):
def test_second_install_no_overwrite_first(install_mockery, mock_fetch, monkeypatch):
spec = Spec('canfail').concretized()
pkg = spack.repo.get(spec)
- monkeypatch.setattr(spack.package.Package, 'remove_prefix', mock_remove_prefix)
+ monkeypatch.setattr(spack.package_base.Package, 'remove_prefix', mock_remove_prefix)
pkg.succeed = True
pkg.do_install()
diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py
index b0e768a387..f228ec2d77 100644
--- a/lib/spack/spack/test/installer.py
+++ b/lib/spack/spack/test/installer.py
@@ -69,7 +69,8 @@ def create_build_task(pkg, install_args={}):
Create a build task for the given (concretized) package
Args:
- pkg (spack.package.PackageBase): concretized package associated with the task
+ pkg (spack.package_base.PackageBase): concretized package associated with
+ the task
install_args (dict): dictionary of kwargs (or install args)
Return:
@@ -716,7 +717,7 @@ def test_install_task_add_compiler(install_mockery, monkeypatch, capfd):
task.compiler = True
# Preclude any meaningful side-effects
- monkeypatch.setattr(spack.package.PackageBase, 'unit_test_check', _true)
+ monkeypatch.setattr(spack.package_base.PackageBase, 'unit_test_check', _true)
monkeypatch.setattr(inst.PackageInstaller, '_setup_install_dir', _noop)
monkeypatch.setattr(spack.build_environment, 'start_build_process', _noop)
monkeypatch.setattr(spack.database.Database, 'add', _noop)
@@ -1049,7 +1050,7 @@ def test_install_fail_fast_on_except(install_mockery, monkeypatch, capsys):
# This will prevent b from installing, which will cause the build of a
# to be skipped.
monkeypatch.setattr(
- spack.package.PackageBase,
+ spack.package_base.PackageBase,
'do_patch',
_test_install_fail_fast_on_except_patch
)
diff --git a/lib/spack/spack/test/package_class.py b/lib/spack/spack/test/package_class.py
index b3afa57f9a..909cf4193f 100644
--- a/lib/spack/spack/test/package_class.py
+++ b/lib/spack/spack/test/package_class.py
@@ -17,7 +17,7 @@ import pytest
import llnl.util.filesystem as fs
-import spack.package
+import spack.package_base
import spack.repo
@@ -80,7 +80,7 @@ def test_possible_dependencies_virtual(mock_packages, mpi_names):
# only one mock MPI has a dependency
expected['fake'] = set()
- assert expected == spack.package.possible_dependencies(
+ assert expected == spack.package_base.possible_dependencies(
"mpi", transitive=False)
@@ -125,7 +125,7 @@ def test_possible_dependencies_with_multiple_classes(
'dt-diamond-bottom': set(),
})
- assert expected == spack.package.possible_dependencies(*pkgs)
+ assert expected == spack.package_base.possible_dependencies(*pkgs)
def setup_install_test(source_paths, install_test_root):
diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py
index ed01e410f8..dabe768b37 100644
--- a/lib/spack/spack/test/package_sanity.py
+++ b/lib/spack/spack/test/package_sanity.py
@@ -16,7 +16,7 @@ import llnl.util.tty as tty
# do sanity checks only on packages modified by a PR
import spack.cmd.style as style
import spack.fetch_strategy
-import spack.package
+import spack.package_base
import spack.paths
import spack.repo
import spack.util.crypto as crypto
@@ -265,7 +265,7 @@ def test_all_dependencies_exist():
"""Make sure no packages have nonexisting dependencies."""
missing = {}
pkgs = [pkg for pkg in spack.repo.path.all_package_names()]
- spack.package.possible_dependencies(
+ spack.package_base.possible_dependencies(
*pkgs, transitive=True, missing=missing)
lines = [
diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py
index e403d85899..5d2928115e 100644
--- a/lib/spack/spack/test/packages.py
+++ b/lib/spack/spack/test/packages.py
@@ -135,10 +135,10 @@ def test_urls_for_versions(mock_packages, config):
def test_url_for_version_with_no_urls(mock_packages, config):
pkg = spack.repo.get('git-test')
- with pytest.raises(spack.package.NoURLError):
+ with pytest.raises(spack.package_base.NoURLError):
pkg.url_for_version('1.0')
- with pytest.raises(spack.package.NoURLError):
+ with pytest.raises(spack.package_base.NoURLError):
pkg.url_for_version('1.1')
@@ -377,7 +377,7 @@ def test_fetch_options(mock_packages, config):
def test_has_test_method_fails(capsys):
with pytest.raises(SystemExit):
- spack.package.has_test_method('printing-package')
+ spack.package_base.has_test_method('printing-package')
captured = capsys.readouterr()[1]
assert 'is not a class' in captured
diff --git a/lib/spack/spack/test/packaging.py b/lib/spack/spack/test/packaging.py
index ff68418ac4..63a3cf9a3e 100644
--- a/lib/spack/spack/test/packaging.py
+++ b/lib/spack/spack/test/packaging.py
@@ -21,7 +21,7 @@ from llnl.util.symlink import symlink
import spack.binary_distribution as bindist
import spack.cmd.buildcache as buildcache
-import spack.package
+import spack.package_base
import spack.repo
import spack.store
import spack.util.gpg
@@ -575,10 +575,10 @@ def mock_download():
def fake_fn(self):
return fetcher
- orig_fn = spack.package.PackageBase.fetcher
- spack.package.PackageBase.fetcher = fake_fn
+ orig_fn = spack.package_base.PackageBase.fetcher
+ spack.package_base.PackageBase.fetcher = fake_fn
yield
- spack.package.PackageBase.fetcher = orig_fn
+ spack.package_base.PackageBase.fetcher = orig_fn
@pytest.mark.parametrize("manual,instr", [(False, False), (False, True),
@@ -598,7 +598,7 @@ def test_manual_download(install_mockery, mock_download, monkeypatch, manual,
pkg.manual_download = manual
if instr:
- monkeypatch.setattr(spack.package.PackageBase, 'download_instr',
+ monkeypatch.setattr(spack.package_base.PackageBase, 'download_instr',
_instr)
expected = pkg.download_instr if manual else 'All fetchers failed'
@@ -616,7 +616,7 @@ def fetching_not_allowed(monkeypatch):
raise Exception("Sources are fetched but shouldn't have been")
fetcher = FetchStrategyComposite()
fetcher.append(FetchingNotAllowed())
- monkeypatch.setattr(spack.package.PackageBase, 'fetcher', fetcher)
+ monkeypatch.setattr(spack.package_base.PackageBase, 'fetcher', fetcher)
def test_fetch_without_code_is_noop(install_mockery, fetching_not_allowed):
diff --git a/lib/spack/spack/test/repo.py b/lib/spack/spack/test/repo.py
index 98b202635d..5dfa9597fc 100644
--- a/lib/spack/spack/test/repo.py
+++ b/lib/spack/spack/test/repo.py
@@ -7,7 +7,7 @@ import sys
import pytest
-import spack.package
+import spack.package_base
import spack.paths
import spack.repo
@@ -116,11 +116,12 @@ def test_absolute_import_spack_packages_as_python_modules(mock_packages):
assert hasattr(spack.pkg.builtin.mock, 'mpileaks')
assert hasattr(spack.pkg.builtin.mock.mpileaks, 'Mpileaks')
assert isinstance(spack.pkg.builtin.mock.mpileaks.Mpileaks,
- spack.package.PackageMeta)
- assert issubclass(spack.pkg.builtin.mock.mpileaks.Mpileaks, spack.package.Package)
+ spack.package_base.PackageMeta)
+ assert issubclass(spack.pkg.builtin.mock.mpileaks.Mpileaks,
+ spack.package_base.Package)
def test_relative_import_spack_packages_as_python_modules(mock_packages):
from spack.pkg.builtin.mock.mpileaks import Mpileaks
- assert isinstance(Mpileaks, spack.package.PackageMeta)
- assert issubclass(Mpileaks, spack.package.Package)
+ assert isinstance(Mpileaks, spack.package_base.PackageMeta)
+ assert issubclass(Mpileaks, spack.package_base.Package)
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 8fc137008c..566d9ccb8d 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -8,7 +8,7 @@ These tests check Spec DAG operations using dummy packages.
import pytest
import spack.error
-import spack.package
+import spack.package_base
import spack.util.hash as hashutil
from spack.dependency import Dependency, all_deptypes, canonical_deptype
from spack.spec import Spec
diff --git a/lib/spack/spack/test/test_activations.py b/lib/spack/spack/test/test_activations.py
index a832236c49..a4439f9596 100644
--- a/lib/spack/spack/test/test_activations.py
+++ b/lib/spack/spack/test/test_activations.py
@@ -14,7 +14,7 @@ import pytest
from llnl.util.link_tree import MergeConflictError
-import spack.package
+import spack.package_base
import spack.spec
from spack.directory_layout import DirectoryLayout
from spack.filesystem_view import YamlFilesystemView
diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py
index dcffcf379f..c8e7e45196 100644
--- a/lib/spack/spack/test/versions.py
+++ b/lib/spack/spack/test/versions.py
@@ -14,7 +14,7 @@ import pytest
from llnl.util.filesystem import working_dir
-import spack.package
+import spack.package_base
import spack.spec
from spack.util.executable import which
from spack.version import Version, VersionList, VersionRange, ver
@@ -590,7 +590,7 @@ def test_invalid_versions(version_str):
reason="Not supported on Windows (yet)")
def test_versions_from_git(mock_git_version_info, monkeypatch, mock_packages):
repo_path, filename, commits = mock_git_version_info
- monkeypatch.setattr(spack.package.PackageBase, 'git', 'file://%s' % repo_path,
+ monkeypatch.setattr(spack.package_base.PackageBase, 'git', 'file://%s' % repo_path,
raising=False)
for commit in commits:
@@ -614,7 +614,7 @@ def test_git_hash_comparisons(
"""Check that hashes compare properly to versions
"""
repo_path, filename, commits = mock_git_version_info
- monkeypatch.setattr(spack.package.PackageBase,
+ monkeypatch.setattr(spack.package_base.PackageBase,
'git', 'file://%s' % repo_path,
raising=False)
diff --git a/lib/spack/spack/util/mock_package.py b/lib/spack/spack/util/mock_package.py
index b89b989311..29f9cb472c 100644
--- a/lib/spack/spack/util/mock_package.py
+++ b/lib/spack/spack/util/mock_package.py
@@ -15,7 +15,7 @@ __all__ = ["MockPackageMultiRepo"]
class MockPackageBase(object):
- """Internal base class for mocking ``spack.package.PackageBase``.
+ """Internal base class for mocking ``spack.package_base.PackageBase``.
Use ``MockPackageMultiRepo.add_package()`` to create new instances.
diff --git a/lib/spack/spack/util/package_hash.py b/lib/spack/spack/util/package_hash.py
index 8340a9eab7..7b97649eb5 100644
--- a/lib/spack/spack/util/package_hash.py
+++ b/lib/spack/spack/util/package_hash.py
@@ -7,7 +7,7 @@ import ast
import spack.directives
import spack.error
-import spack.package
+import spack.package_base
import spack.repo
import spack.spec
import spack.util.hash
@@ -63,7 +63,7 @@ class RemoveDirectives(ast.NodeTransformer):
# list of URL attributes and metadata attributes
# these will be removed from packages.
self.metadata_attrs = [s.url_attr for s in spack.fetch_strategy.all_strategies]
- self.metadata_attrs += spack.package.Package.metadata_attrs
+ self.metadata_attrs += spack.package_base.Package.metadata_attrs
self.spec = spec
self.in_classdef = False # used to avoid nested classdefs
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index 4bd0d92586..a137c5f6b3 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -594,7 +594,7 @@ def find_versions_of_archive(
list_depth (int): max depth to follow links on list_url pages.
Defaults to 0.
concurrency (int): maximum number of concurrent requests
- reference_package (spack.package.Package or None): a spack package
+ reference_package (spack.package_base.Package or None): a spack package
used as a reference for url detection. Uses the url_for_version
method on the package to produce reference urls which, if found,
are preferred.
diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py
index 4ec016830e..aad453bb11 100644
--- a/lib/spack/spack/variant.py
+++ b/lib/spack/spack/variant.py
@@ -95,7 +95,7 @@ class Variant(object):
Args:
vspec (Variant): instance to be validated
- pkg (spack.package.Package): the package that required the validation,
+ pkg (spack.package_base.Package): the package that required the validation,
if available
Raises:
diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py
index 495cfee6e3..5fa7793da2 100644
--- a/lib/spack/spack/version.py
+++ b/lib/spack/spack/version.py
@@ -177,6 +177,7 @@ class Version(object):
]
def __init__(self, string):
+ # type: (str) -> None
if not isinstance(string, str):
string = str(string)