author     Dominik Dold <34650920+dodo47@users.noreply.github.com>  2019-12-06 01:48:20 +0100
committer  Adam J. Stewart <ajstewart426@gmail.com>                 2019-12-05 18:48:20 -0600
commit     c3eafde7bf180e94bed6789b5dd41cb3ee240731 (patch)
tree       77673c16efe703106e1522a514188023c81dcfc8 /var
parent     a288449f0b23acbec128bab943e42b403f4f4df9 (diff)
Add new TensorFlow package (#13112)
* add tensorflow Change-Id: Id778c68d148cc42f0b478a9d10a8f937cb54cdc6
* make bazel and tensorflow build Change-Id: Iae9005e8f4dcc8f1ed36ea9337d2430aeebb291f
* fix flake8 Change-Id: Ib05529dd796eab4a8855a5d7775cc4efea8e479d
* 2nd flake8 attempt Change-Id: I46224be3a374b2a65793048b0c5178ea64adbd78
* replace md5 sums with sha256
* add version 1.13.2
* bazel() -> bazel('build',...
* specify versions of bazel dependency
* build with CUDA
* add TODOs
* add more TODOs
* improve enum34 dependency
* py-future is a dependency as of v1.14
* Update var/spack/repos/builtin/packages/tensorflow/package.py Co-Authored-By: Adam J. Stewart <ajstewart426@gmail.com>
* Update var/spack/repos/builtin/packages/tensorflow/package.py Co-Authored-By: Adam J. Stewart <ajstewart426@gmail.com>
* Update var/spack/repos/builtin/packages/tensorflow/package.py Co-Authored-By: Adam J. Stewart <ajstewart426@gmail.com>
* Update var/spack/repos/builtin/packages/tensorflow/package.py Co-Authored-By: Adam J. Stewart <ajstewart426@gmail.com>
* enable nccl, cuda by default
* explain patches
* add todo
* remove unnecessary copt_flag
* use join
* join argument must be an iterable
* split long line; use same opts for non-cuda build
* without opt flags, configure hangs
* introduce build phases; re-arrange
* undo mistake
* restore unset tmp_path
* as of v1.14, nccl_install_path is parsed correctly, hence change ...prefix.lib to ...prefix
* now, version 1.14 compiles successfully with cuda
* add version 2.1.0
* specify bazel dependency for version 2.1.0-rc0
* account for deprecated bazel opts for v2.1.0-rc0
* disable mkldnn contraction kernel
* Flake8 fixes
* md5 -> sha256
* Fix TF and TF-estimator version deps
* Don't just comment out patch
* Add myself as a maintainer
* Patch py-astor to support newer py-setuptools
* Add more versions and bazel version constraints
* Add a build phase
* Add note about configure interactivity
* dev-build -> build-env
* Disable iOS build
* Use correct optimization flags
* Add variants for all possible features
* nccl isn't always a dependency
* Specify correct dependency versions for each release
* Libs may not be in lib or lib64
* Add py-opt-einsum package
* Add newer version of py-protobuf
* Add newer version of py-wrapt
* Fix Python 2.6 syntax error
* Code review
* Set more env vars for older versions
* Add more env vars, fix bazel versions, add conflicts
* Fix config options
* Specify versions that support --config args
* Add py-future dependency for Python 2
* Fix cuda config flag and compute capabilities
* Fix installation on macOS, add unit tests
* Override cuda variant default to True on non-macOS
* Rename tensorflow to py-tensorflow
* Has to extend something
* Fix os.symlink call
* convert cuda_arch values to capabilities
* restore nccl prefix path for v1.13.1
* Revert to v2
* Remove extraneous period
* Add new version of jdk/openjdk
* More stable cuda_arch formatting
* Fix bazel unit tests
* Fix symlinking
* Fix unit tests
* +gcp by default until build error figured out
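For illustration only (not part of the commit): with this change in a Spack checkout, the new package and its variants can be exercised with ordinary Spack spec syntax. The package name, versions, and variant names are taken from the py-tensorflow package.py added below; the exact specs here are just examples, and which combinations actually build depends on the conflicts declared in the package.

    spack install py-tensorflow@2.0.0 +cuda cuda_arch=70
    spack install py-tensorflow@1.14.0 ~cuda ~nccl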
Diffstat (limited to 'var')
-rw-r--r--  var/spack/repos/builtin/packages/bazel/package.py                           |  12
-rw-r--r--  var/spack/repos/builtin/packages/icedtea/package.py                         |   1
-rw-r--r--  var/spack/repos/builtin/packages/jdk/package.py                             |   2
-rw-r--r--  var/spack/repos/builtin/packages/openjdk/package.py                         |  28
-rw-r--r--  var/spack/repos/builtin/packages/py-astor/package.py                        |  15
-rw-r--r--  var/spack/repos/builtin/packages/py-backports-weakref/package.py            |   3
-rw-r--r--  var/spack/repos/builtin/packages/py-gast/package.py                         |   1
-rw-r--r--  var/spack/repos/builtin/packages/py-opt-einsum/package.py                   |  22
-rw-r--r--  var/spack/repos/builtin/packages/py-pep8/package.py                         |  19
-rw-r--r--  var/spack/repos/builtin/packages/py-protobuf/package.py                     |  37
-rw-r--r--  var/spack/repos/builtin/packages/py-pytest-cache/package.py                 |   2
-rw-r--r--  var/spack/repos/builtin/packages/py-pytest-pep8/package.py                  |  22
-rw-r--r--  var/spack/repos/builtin/packages/py-tensorflow-estimator/package.py         |  47
-rw-r--r--  var/spack/repos/builtin/packages/py-tensorflow/crosstool.patch              |  21
-rw-r--r--  var/spack/repos/builtin/packages/py-tensorflow/http_archive.patch           |  57
-rw-r--r--  var/spack/repos/builtin/packages/py-tensorflow/io_bazel_rules_docker2.patch |  18
-rw-r--r--  var/spack/repos/builtin/packages/py-tensorflow/package.py                   | 698
-rw-r--r--  var/spack/repos/builtin/packages/py-tensorflow/url-zlib.patch               |  13
-rw-r--r--  var/spack/repos/builtin/packages/py-wrapt/package.py                        |   3
19 files changed, 978 insertions, 43 deletions
diff --git a/var/spack/repos/builtin/packages/bazel/package.py b/var/spack/repos/builtin/packages/bazel/package.py
index 695ed800df..b981fe409b 100644
--- a/var/spack/repos/builtin/packages/bazel/package.py
+++ b/var/spack/repos/builtin/packages/bazel/package.py
@@ -18,6 +18,7 @@ class Bazel(Package):
maintainers = ['adamjstewart']
+ version('1.2.1', sha256='255da49d0f012bc4f2c1d6d3ccdbe578e22fe97b8d124e1629a486fe2a09d3e1')
version('1.2.0', sha256='9cb46b0a18b9166730307a0e82bf4c02281a1cc6da0fb11239e6fe4147bdee6e')
version('1.1.0', sha256='4b66a8c93af7832ed32e7236cf454a05f3aa06d25a8576fc3f83114f142f95ab')
version('1.0.1', sha256='f4d2dfad011ff03a5fae41b9b02cd96cd7297c1205d496603d66516934fbcfee')
@@ -122,7 +123,9 @@ class Bazel(Package):
return url.format(version)
def setup_build_environment(self, env):
- env.set('EXTRA_BAZEL_ARGS', '--host_javabase=@local_jdk//:jdk')
+ env.set('EXTRA_BAZEL_ARGS',
+ # Spack's logs don't handle colored output well
+ '--color=no --host_javabase=@local_jdk//:jdk')
def bootstrap(self, spec, prefix):
bash = which('bash')
@@ -137,7 +140,8 @@ class Bazel(Package):
def test(self):
# https://github.com/Homebrew/homebrew-core/blob/master/Formula/bazel.rb
- with working_dir('spack-test', create=True):
+ # Bazel does not work properly on NFS, switch to /tmp
+ with working_dir('/tmp/spack/bazel/spack-test', create=True):
touch('WORKSPACE')
with open('ProjectRunner.java', 'w') as f:
@@ -156,8 +160,10 @@ java_binary(
main_class = "ProjectRunner",
)""")
+ # Spack's logs don't handle colored output well
bazel = Executable(self.prefix.bin.bazel)
- bazel('build', '//:bazel-test')
+ bazel('--output_user_root=/tmp/spack/bazel/spack-test',
+ 'build', '--color=no', '//:bazel-test')
exe = Executable('bazel-bin/bazel-test')
assert exe(output=str) == 'Hi!\n'
diff --git a/var/spack/repos/builtin/packages/icedtea/package.py b/var/spack/repos/builtin/packages/icedtea/package.py
index a367d92223..ad12a14678 100644
--- a/var/spack/repos/builtin/packages/icedtea/package.py
+++ b/var/spack/repos/builtin/packages/icedtea/package.py
@@ -62,7 +62,6 @@ class Icedtea(AutotoolsPackage):
depends_on('zlib')
depends_on('alsa-lib')
- provides('java')
provides('java@8', when='@3.4.0:3.99.99')
force_autoreconf = True
diff --git a/var/spack/repos/builtin/packages/jdk/package.py b/var/spack/repos/builtin/packages/jdk/package.py
index 366af6907f..be468be64e 100644
--- a/var/spack/repos/builtin/packages/jdk/package.py
+++ b/var/spack/repos/builtin/packages/jdk/package.py
@@ -48,6 +48,8 @@ class Jdk(Package):
url='http://download.oracle.com/otn-pub/java/jdk/10.0.2+13/19aef61b38124481863b1413dce1855f/jdk-10.0.2_linux-x64_bin.tar.gz')
version('10.0.1_10', sha256='ae8ed645e6af38432a56a847597ac61d4283b7536688dbab44ab536199d1e5a4', curl_options=curl_options,
url='http://download.oracle.com/otn-pub/java/jdk/10.0.1+10/fb4372174a714e6b8c52526dc134031e/jdk-10.0.1_linux-x64_bin.tar.gz')
+ version('1.8.0_231-b11', sha256='a011584a2c9378bf70c6903ef5fbf101b30b08937441dc2ec67932fb3620b2cf', curl_options=curl_options,
+ url='https://download.oracle.com/otn/java/jdk/8u231-b11/5b13a193868b4bf28bcb45c792fce896/jdk-8u231-linux-x64.tar.gz')
version('1.8.0_212-b10', sha256='3160c50aa8d8e081c8c7fe0f859ea452922eca5d2ae8f8ef22011ae87e6fedfb', curl_options=curl_options,
url='https://download.oracle.com/otn/java/jdk/8u212-b10/59066701cf1a433da9770636fbc4c9aa/jdk-8u212-linux-x64.tar.gz')
version('1.8.0_202-b08', sha256='9a5c32411a6a06e22b69c495b7975034409fa1652d03aeb8eb5b6f59fd4594e0', curl_options=curl_options,
diff --git a/var/spack/repos/builtin/packages/openjdk/package.py b/var/spack/repos/builtin/packages/openjdk/package.py
index 6329fce753..76fa7c9043 100644
--- a/var/spack/repos/builtin/packages/openjdk/package.py
+++ b/var/spack/repos/builtin/packages/openjdk/package.py
@@ -12,25 +12,15 @@ class Openjdk(Package):
homepage = "https://jdk.java.net"
- version(
- "11.0.2",
- sha256="99be79935354f5c0df1ad293620ea36d13f48ec3ea870c838f20c504c9668b57",
- url="https://download.java.net/java/GA/jdk11/9/GPL/openjdk-11.0.2_linux-x64_bin.tar.gz",
- )
-
- version(
- "11.0.1",
- sha256="7a6bb980b9c91c478421f865087ad2d69086a0583aeeb9e69204785e8e97dcfd",
- url="https://download.java.net/java/GA/jdk11/13/GPL/openjdk-11.0.1_linux-x64_bin.tar.gz",
- )
-
- version(
- "1.8.0_202-b08",
- sha256="533dcd8d9ca15df231a1eb392fa713a66bca85a8e76d9b4ee30975f3823636b7",
- url="https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u202-b08/OpenJDK8U-jdk_x64_linux_openj9_8u202b08_openj9-0.12.0.tar.gz",
- )
-
- provides('java')
+ version("11.0.2", sha256="99be79935354f5c0df1ad293620ea36d13f48ec3ea870c838f20c504c9668b57",
+ url="https://download.java.net/java/GA/jdk11/9/GPL/openjdk-11.0.2_linux-x64_bin.tar.gz")
+ version("11.0.1", sha256="7a6bb980b9c91c478421f865087ad2d69086a0583aeeb9e69204785e8e97dcfd",
+ url="https://download.java.net/java/GA/jdk11/13/GPL/openjdk-11.0.1_linux-x64_bin.tar.gz")
+ version("1.8.0_202-b08", sha256="533dcd8d9ca15df231a1eb392fa713a66bca85a8e76d9b4ee30975f3823636b7",
+ url="https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u202-b08/OpenJDK8U-jdk_x64_linux_openj9_8u202b08_openj9-0.12.0.tar.gz")
+ version('1.8.0_40-b25', sha256='79e96dce03a14271040023231a7d0ae374b755d48adf68bbdaec30294e4e2b88',
+ url='https://download.java.net/openjdk/jdk8u40/ri/jdk_ri-8u40-b25-linux-x64-10_feb_2015.tar.gz')
+
provides('java@11', when='@11.0:11.99')
provides('java@8', when='@1.8.0:1.8.999')
diff --git a/var/spack/repos/builtin/packages/py-astor/package.py b/var/spack/repos/builtin/packages/py-astor/package.py
index 7a8ad9dc29..bcda4dc43e 100644
--- a/var/spack/repos/builtin/packages/py-astor/package.py
+++ b/var/spack/repos/builtin/packages/py-astor/package.py
@@ -14,11 +14,16 @@ class PyAstor(PythonPackage):
url = "https://pypi.io/packages/source/a/astor/astor-0.8.0.tar.gz"
version('0.8.0', sha256='37a6eed8b371f1228db08234ed7f6cfdc7817a3ed3824797e20cbb11dc2a7862')
- version('0.6', sha256='175ec395cde36aa0178c5a3120d03954c65d1ef4bb19ec4aa30e9d7a7cc426c4')
+ version('0.6', sha256='175ec395cde36aa0178c5a3120d03954c65d1ef4bb19ec4aa30e9d7a7cc426c4')
- depends_on('python@2.7:2.8,3.4:')
- # Build fails with py-setuptools@41.4.0
- # https://github.com/berkerpeksag/astor/issues/162
- depends_on('py-setuptools@:41.3', type='build')
+ depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
depends_on('py-nose', type='test')
depends_on('py-astunparse', type='test')
+
+ # Build fails with newer versions of setuptools
+ # https://github.com/berkerpeksag/astor/issues/162
+ # https://github.com/berkerpeksag/astor/pull/163
+ patch('https://github.com/berkerpeksag/astor/pull/163/commits/30059dac4eb832e58ab2109db84508b294ba366d.patch',
+ sha256='edc5eeddabe153b08e938f52edaeb2d880ee3128082967f310db0f98510fe6e0',
+ when='@0.8.0')
diff --git a/var/spack/repos/builtin/packages/py-backports-weakref/package.py b/var/spack/repos/builtin/packages/py-backports-weakref/package.py
index 566a0841a0..d7f2700f9d 100644
--- a/var/spack/repos/builtin/packages/py-backports-weakref/package.py
+++ b/var/spack/repos/builtin/packages/py-backports-weakref/package.py
@@ -12,6 +12,7 @@ class PyBackportsWeakref(PythonPackage):
homepage = "https://github.com/PiDelport/backports.weakref"
url = "https://pypi.org/packages/source/b/backports.weakref/backports.weakref-1.0.post1.tar.gz"
- version('1.0.post1', sha256='bc4170a29915f8b22c9e7c4939701859650f2eb84184aee80da329ac0b9825c2')
+ version('1.0.post1', sha256='bc4170a29915f8b22c9e7c4939701859650f2eb84184aee80da329ac0b9825c2', preferred=True)
+ version('1.0rc1', sha256='8813bf712a66b3d8b85dc289e1104ed220f1878cf981e2fe756dfaabe9a82892')
depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-gast/package.py b/var/spack/repos/builtin/packages/py-gast/package.py
index 05d42822af..a554e47893 100644
--- a/var/spack/repos/builtin/packages/py-gast/package.py
+++ b/var/spack/repos/builtin/packages/py-gast/package.py
@@ -13,6 +13,7 @@ class PyGast(PythonPackage):
url = "https://pypi.io/packages/source/g/gast/gast-0.3.2.tar.gz"
version('0.3.2', sha256='5c7617f1f6c8b8b426819642b16b9016727ddaecd16af9a07753e537eba8a3a5')
+ version('0.2.2', sha256='fe939df4583692f0512161ec1c880e0a10e71e6a232da045ab8edd3756fbadf0')
version('0.2.0', sha256='7068908321ecd2774f145193c4b34a11305bd104b4551b09273dfd1d6a374930')
depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-opt-einsum/package.py b/var/spack/repos/builtin/packages/py-opt-einsum/package.py
new file mode 100644
index 0000000000..83475e3d0d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-opt-einsum/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyOptEinsum(PythonPackage):
+ """Optimized Einsum: A tensor contraction order optimizer."""
+
+ homepage = "https://github.com/dgasmith/opt_einsum"
+ url = "https://pypi.io/packages/source/o/opt_einsum/opt_einsum-3.1.0.tar.gz"
+
+ version('3.1.0', sha256='edfada4b1d0b3b782ace8bc14e80618ff629abf53143e1e6bbf9bd00b11ece77')
+
+ depends_on('python@3.5:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy@1.7:', type=('build', 'run'))
+ depends_on('py-pytest', type='test')
+ depends_on('py-pytest-cov', type='test')
+ depends_on('py-pytest-pep8', type='test')
diff --git a/var/spack/repos/builtin/packages/py-pep8/package.py b/var/spack/repos/builtin/packages/py-pep8/package.py
new file mode 100644
index 0000000000..d1ea5746eb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pep8/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyPep8(PythonPackage):
+ """Python style guide checker (deprecated, use py-pycodestyle instead)."""
+
+ homepage = "https://pep8.readthedocs.org/"
+ url = "https://pypi.io/packages/source/p/pep8/pep8-1.7.1.tar.gz"
+
+ import_modules = ['pep8']
+
+ version('1.7.1', sha256='fe249b52e20498e59e0b5c5256aa52ee99fc295b26ec9eaa85776ffdb9fe6374')
+
+ depends_on('py-setuptools', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-protobuf/package.py b/var/spack/repos/builtin/packages/py-protobuf/package.py
index 27b1e8093b..eef30041c6 100644
--- a/var/spack/repos/builtin/packages/py-protobuf/package.py
+++ b/var/spack/repos/builtin/packages/py-protobuf/package.py
@@ -15,22 +15,28 @@ class PyProtobuf(PythonPackage):
and using a variety of languages."""
homepage = 'https://developers.google.com/protocol-buffers/'
- url = 'https://pypi.io/packages/source/p/protobuf/protobuf-3.7.1.tar.gz'
+ url = 'https://pypi.io/packages/source/p/protobuf/protobuf-3.11.0.tar.gz'
variant('cpp', default=False,
description='Enable the cpp implementation')
- version('3.7.1', sha256='21e395d7959551e759d604940a115c51c6347d90a475c9baf471a1a86b5604a9')
- version('3.6.1', sha256='1489b376b0f364bcc6f89519718c057eb191d7ad6f1b395ffd93d1aa45587811')
- version('3.6.0', sha256='a37836aa47d1b81c2db1a6b7a5e79926062b5d76bd962115a0e615551be2b48d')
- version('3.5.2', sha256='09879a295fd7234e523b62066223b128c5a8a88f682e3aff62fb115e4a0d8be0')
- version('3.5.1', sha256='95b78959572de7d7fafa3acb718ed71f482932ddddddbd29ba8319c10639d863')
- version('3.4.0', sha256='ef02609ef445987976a3a26bff77119c518e0915c96661c3a3b17856d0ef6374')
- version('3.3.0', sha256='1cbcee2c45773f57cb6de7ee0eceb97f92b9b69c0178305509b162c0160c1f04')
- version('2.6.1', sha256='8faca1fb462ee1be58d00f5efb4ca4f64bde92187fe61fde32615bbee7b3e745')
- version('2.5.0', sha256='58292c459598c9297258bf57acc055f701c727f0154a86af8c0947dde37d8172')
- version('2.4.1', sha256='df30b98acb6ef892da8b4776175510cff2131908fd0526b6bad960c55a830a1b')
- version('2.3.0', sha256='374bb047874a506507912c3717d0ce62affbaa9a22bcb494d63d60326a0867b5')
+ version('3.11.0', sha256='97b08853b9bb71512ed52381f05cf2d4179f4234825b505d8f8d2bb9d9429939')
+ version('3.7.1', sha256='21e395d7959551e759d604940a115c51c6347d90a475c9baf471a1a86b5604a9')
+ version('3.6.1', sha256='1489b376b0f364bcc6f89519718c057eb191d7ad6f1b395ffd93d1aa45587811')
+ version('3.6.0', sha256='a37836aa47d1b81c2db1a6b7a5e79926062b5d76bd962115a0e615551be2b48d')
+ version('3.5.2', sha256='09879a295fd7234e523b62066223b128c5a8a88f682e3aff62fb115e4a0d8be0')
+ version('3.5.1', sha256='95b78959572de7d7fafa3acb718ed71f482932ddddddbd29ba8319c10639d863')
+ version('3.4.0', sha256='ef02609ef445987976a3a26bff77119c518e0915c96661c3a3b17856d0ef6374')
+ version('3.3.0', sha256='1cbcee2c45773f57cb6de7ee0eceb97f92b9b69c0178305509b162c0160c1f04')
+ version('3.1.0', sha256='0bc10bfd00a9614fae58c86c21fbcf339790e48accf6d45f098034de985f5405',
+ url='https://github.com/protocolbuffers/protobuf/releases/download/v3.1.0/protobuf-python-3.1.0.tar.gz')
+ version('3.0.0', sha256='ecc40bc30f1183b418fe0ec0c90bc3b53fa1707c4205ee278c6b90479e5b6ff5')
+ version('3.0.0b2', sha256='d5b560bbc4b7d97cc2455c05cad9299d9db02d7bd11193b05684e3a86303c229')
+ version('3.0.0a3', sha256='b61622de5048415bfd3f2d812ad64606438ac9e25009ae84191405fe58e522c1')
+ version('2.6.1', sha256='8faca1fb462ee1be58d00f5efb4ca4f64bde92187fe61fde32615bbee7b3e745')
+ version('2.5.0', sha256='58292c459598c9297258bf57acc055f701c727f0154a86af8c0947dde37d8172')
+ version('2.4.1', sha256='df30b98acb6ef892da8b4776175510cff2131908fd0526b6bad960c55a830a1b')
+ version('2.3.0', sha256='374bb047874a506507912c3717d0ce62affbaa9a22bcb494d63d60326a0867b5')
depends_on('py-setuptools', type=('build', 'run'))
depends_on('py-six@1.9:', when='@3:', type=('build', 'run'))
@@ -38,6 +44,13 @@ class PyProtobuf(PythonPackage):
depends_on('py-unittest2', when='@3: ^python@:2', type=('build', 'run'))
depends_on('protobuf', when='+cpp')
+ @property
+ def build_directory(self):
+ if self.spec.satisfies('@3.1.0'):
+ return 'python'
+ else:
+ return '.'
+
@when('+cpp')
def build_args(self, spec, prefix):
return ['--cpp_implementation']
diff --git a/var/spack/repos/builtin/packages/py-pytest-cache/package.py b/var/spack/repos/builtin/packages/py-pytest-cache/package.py
index 6094911d4f..a98920c7f1 100644
--- a/var/spack/repos/builtin/packages/py-pytest-cache/package.py
+++ b/var/spack/repos/builtin/packages/py-pytest-cache/package.py
@@ -17,5 +17,5 @@ class PyPytestCache(PythonPackage):
version('1.0', sha256='be7468edd4d3d83f1e844959fd6e3fd28e77a481440a7118d430130ea31b07a9')
depends_on('py-setuptools', type='build')
- depends_on('py-pytest@2.2.0:2.7.3', type=('build', 'run'))
+ depends_on('py-pytest@2.2:', type=('build', 'run'))
depends_on('py-execnet@1.2:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pytest-pep8/package.py b/var/spack/repos/builtin/packages/py-pytest-pep8/package.py
new file mode 100644
index 0000000000..841c2ef8ff
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pytest-pep8/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyPytestPep8(PythonPackage):
+ """pytest plugin for efficiently checking PEP8 compliance"""
+
+ homepage = "https://bitbucket.org/pytest-dev/pytest-pep8"
+ url = "https://pypi.io/packages/source/p/pytest-pep8/pytest-pep8-1.0.6.tar.gz"
+
+ import_modules = ['pytest_pep8']
+
+ version('1.0.6', sha256='032ef7e5fa3ac30f4458c73e05bb67b0f036a8a5cb418a534b3170f89f120318')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-pytest-cache', type=('build', 'run'))
+ depends_on('py-pytest@2.4.2:', type=('build', 'run'))
+ depends_on('py-pep8@1.3:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-tensorflow-estimator/package.py b/var/spack/repos/builtin/packages/py-tensorflow-estimator/package.py
new file mode 100644
index 0000000000..26b4a789d0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-tensorflow-estimator/package.py
@@ -0,0 +1,47 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class PyTensorflowEstimator(Package):
+ """TensorFlow Estimator is a high-level TensorFlow API that greatly
+ simplifies machine learning programming."""
+
+ homepage = "https://github.com/tensorflow/estimator"
+ url = "https://github.com/tensorflow/estimator/archive/v1.13.0.tar.gz"
+
+ version('2.0.0', sha256='6f4bdf1ab219e1f1cba25d2af097dc820f56479f12a839853d97422fe4d8b465')
+ version('1.13.0', sha256='a787b150ff436636df723e507019c72a5d6486cfe506886279d380166953f12f', preferred=True)
+
+ extends('python')
+
+ depends_on('py-tensorflow@2.0.0', when='@2.0.0')
+ depends_on('py-tensorflow@1.13.1', when='@1.13.0')
+
+ depends_on('bazel@0.19.0', type='build')
+ depends_on('py-pip', type='build')
+ depends_on('py-funcsigs@1.0.2:', type=('build', 'run'))
+
+ def install(self, spec, prefix):
+ tmp_path = join_path(env.get('SPACK_TMPDIR', '/tmp/spack'),
+ 'tf-estimator',
+ self.module.site_packages_dir[1:])
+ mkdirp(tmp_path)
+ env['TEST_TMPDIR'] = tmp_path
+ env['HOME'] = tmp_path
+
+ # bazel uses system PYTHONPATH instead of spack paths
+ bazel('--action_env', 'PYTHONPATH={0}'.format(env['PYTHONPATH']),
+ '//tensorflow_estimator/tools/pip_package:build_pip_package')
+
+ build_pip_package = Executable(join_path(
+ 'bazel-bin/tensorflow_estimator/tools',
+ 'pip_package/build_pip_package'))
+ build_pip_package(tmp_path)
+
+ pip = Executable('pip')
+ pip('install', '--prefix={0}'.format(prefix),
+ '--find-links={0}'.format(tmp_path), 'tensorflow-estimator')
diff --git a/var/spack/repos/builtin/packages/py-tensorflow/crosstool.patch b/var/spack/repos/builtin/packages/py-tensorflow/crosstool.patch
new file mode 100644
index 0000000000..3583211cab
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-tensorflow/crosstool.patch
@@ -0,0 +1,21 @@
++++ tensorflow-1.0.0-rc2/third_party/gpus/crosstool/clang/bin/crosstool_wrapper_driver_is_not_gcc.tpl 2017-02-15 20:40:47.633496842 +0100
+--- tensorflow-1.0.0-rc2/third_party/gpus/crosstool/clang/bin/crosstool_wrapper_driver_is_not_gcc.tpl 2017-02-08 19:32:14.000000000 +0100
+@@ -46,13 +46,13 @@ import sys
+ import pipes
+
+ # Template values set by cuda_autoconf.
+-CPU_COMPILER = ('%{cpu_compiler}')
+-GCC_HOST_COMPILER_PATH = ('%{gcc_host_compiler_path}')
++CPU_COMPILER = os.environ['SPACK_CC']
++GCC_HOST_COMPILER_PATH = CPU_COMPILER
+
+ CURRENT_DIR = os.path.dirname(sys.argv[0])
+ NVCC_PATH = CURRENT_DIR + '/../../../cuda/bin/nvcc'
+-LLVM_HOST_COMPILER_PATH = ('/usr/bin/gcc')
+-PREFIX_DIR = os.path.dirname(GCC_HOST_COMPILER_PATH)
++LLVM_HOST_COMPILER_PATH = os.environ['SPACK_CC']
++PREFIX_DIR = os.path.dirname(os.environ['SPACK_CC'])
+ NVCC_VERSION = '%{cuda_version}'
+
+ def Log(s):
+
diff --git a/var/spack/repos/builtin/packages/py-tensorflow/http_archive.patch b/var/spack/repos/builtin/packages/py-tensorflow/http_archive.patch
new file mode 100644
index 0000000000..ea919567c1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-tensorflow/http_archive.patch
@@ -0,0 +1,57 @@
+diff --git a/WORKSPACE b/WORKSPACE
+index 1796182..84b5eab 100644
+--- a/WORKSPACE
++++ b/WORKSPACE
+@@ -1,5 +1,7 @@
+ workspace(name = "org_tensorflow")
+
++load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive", "http_file")
++
+ http_archive(
+ name = "io_bazel_rules_closure",
+ sha256 = "a38539c5b5c358548e75b44141b4ab637bba7c4dc02b46b1f62a96d6433f56ae",
+@@ -30,7 +32,7 @@ android_workspace()
+ # Please add all new TensorFlow dependencies in workspace.bzl.
+ tf_workspace()
+
+-new_http_archive(
++http_archive(
+ name = "inception_v1",
+ build_file = "models.BUILD",
+ sha256 = "7efe12a8363f09bc24d7b7a450304a15655a57a7751929b2c1593a71183bb105",
+@@ -40,7 +42,7 @@ new_http_archive(
+ ],
+ )
+
+-new_http_archive(
++http_archive(
+ name = "mobile_ssd",
+ build_file = "models.BUILD",
+ sha256 = "bddd81ea5c80a97adfac1c9f770e6f55cbafd7cce4d3bbe15fbeb041e6b8f3e8",
+@@ -50,7 +52,7 @@ new_http_archive(
+ ],
+ )
+
+-new_http_archive(
++http_archive(
+ name = "mobile_multibox",
+ build_file = "models.BUILD",
+ sha256 = "859edcddf84dddb974c36c36cfc1f74555148e9c9213dedacf1d6b613ad52b96",
+@@ -60,7 +62,7 @@ new_http_archive(
+ ],
+ )
+
+-new_http_archive(
++http_archive(
+ name = "stylize",
+ build_file = "models.BUILD",
+ sha256 = "3d374a730aef330424a356a8d4f04d8a54277c425e274ecb7d9c83aa912c6bfa",
+@@ -70,7 +72,7 @@ new_http_archive(
+ ],
+ )
+
+-new_http_archive(
++http_archive(
+ name = "speech_commands",
+ build_file = "models.BUILD",
+ sha256 = "c3ec4fea3158eb111f1d932336351edfe8bd515bb6e87aad4f25dbad0a600d0c",
diff --git a/var/spack/repos/builtin/packages/py-tensorflow/io_bazel_rules_docker2.patch b/var/spack/repos/builtin/packages/py-tensorflow/io_bazel_rules_docker2.patch
new file mode 100644
index 0000000000..5b341e69f1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-tensorflow/io_bazel_rules_docker2.patch
@@ -0,0 +1,18 @@
+diff --git a/WORKSPACE b/WORKSPACE
+index 74ea14d..0b09a6e 100644
+--- a/WORKSPACE
++++ b/WORKSPACE
+@@ -34,6 +34,13 @@ load(
+
+ bazel_toolchains_repositories()
+
++http_archive(
++ name = "io_bazel_rules_docker",
++ sha256 = "413bb1ec0895a8d3249a01edf24b82fd06af3c8633c9fb833a0cb1d4b234d46d",
++ strip_prefix = "rules_docker-0.12.0",
++ urls = ["https://github.com/bazelbuild/rules_docker/releases/download/v0.12.0/rules_docker-v0.12.0.tar.gz"],
++)
++
+ load(
+ "@io_bazel_rules_docker//repositories:repositories.bzl",
+ container_repositories = "repositories",
diff --git a/var/spack/repos/builtin/packages/py-tensorflow/package.py b/var/spack/repos/builtin/packages/py-tensorflow/package.py
new file mode 100644
index 0000000000..fa76b31388
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-tensorflow/package.py
@@ -0,0 +1,698 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import glob
+import os
+import sys
+
+
+class PyTensorflow(Package, CudaPackage):
+ """TensorFlow is an Open Source Software Library for Machine Intelligence
+ """
+
+ homepage = "https://www.tensorflow.org"
+ url = "https://github.com/tensorflow/tensorflow/archive/v2.0.0.tar.gz"
+
+ maintainers = ['adamjstewart']
+ import_modules = ['tensorflow']
+
+ version('2.1.0-rc0', sha256='674cc90223f1d6b7fa2969e82636a630ce453e48a9dec39d73d6dba2fd3fd243')
+ version('2.0.0', sha256='49b5f0495cd681cbcb5296a4476853d4aea19a43bdd9f179c928a977308a0617', preferred=True)
+ version('1.15.0', sha256='a5d49c00a175a61da7431a9b289747d62339be9cf37600330ad63b611f7f5dc9')
+ version('1.14.0', sha256='aa2a6a1daafa3af66807cfe0bc77bfe1144a9a53df9a96bab52e3e575b3047ed')
+ version('1.13.2', sha256='abe3bf0c47845a628b7df4c57646f41a10ee70f914f1b018a5c761be75e1f1a9')
+ version('1.13.1', sha256='7cd19978e6bc7edc2c847bce19f95515a742b34ea5e28e4389dade35348f58ed')
+ version('1.12.3', sha256='b9e5488e84f4a133ed20b18605f0cd6301f11d356bd959712db4e7b9301d0462')
+ version('1.12.2', sha256='90ffc7cf1df5e4b8385c9108db18d5d5034ec423547c0e167d44f5746a20d06b')
+ version('1.12.1', sha256='7b559a3ae56322b7a7e4307f45f9fce96022c533a98b32c18bfdff8c5838271d')
+ version('1.12.0', sha256='3c87b81e37d4ed7f3da6200474fa5e656ffd20d8811068572f43610cae97ca92')
+ version('1.11.0', sha256='f49ce3f1d04cee854bc9f74fa9696991140b34a2e2447f35f01391b72c8bfa9f')
+ version('1.10.1', sha256='83092d709800e2d93d4d4b1bcacaeb74f2f328962ed764cb35bbee20402879c6')
+ version('1.10.0', sha256='ee9cb98d9e0d8106f2f4ed52a38fe89399324af303e1401567e5b64a9f86744b')
+ version('1.9.0', sha256='ffc3151b06823d57b4a408261ba8efe53601563dfe93af0866751d4f6ca5068c')
+ version('1.8.0', sha256='47646952590fd213b747247e6870d89bb4a368a95ae3561513d6c76e44f92a75')
+ version('1.7.1', sha256='3147f8c60d1f30da23a831bcf732e74b935dcee7c62e4b8b85f0f093030b52c8')
+ version('1.7.0', sha256='c676a96fc8700722816b2b98c85578b2f99fac7a7b2484c9c7f0641484f8d50d')
+ version('1.6.0', sha256='03cf1423446abbead6bd8c3cf6e6affa7d99746cd119691b012aac9a1795f4fb')
+ version('1.5.1', sha256='cab2157783905e12a7a3baae3264edfb739dd92d5658019a131fff4b14190240')
+ version('1.5.0', sha256='0642781c3a3a8c2c4834b91b86aec385f0b2ada7d721571458079478cc5b29c8')
+ version('1.4.1', sha256='1f75e463318419a1b3ae076d5a92697c1d3a85e8377c946a5510b651ff5c0d60')
+ version('1.4.0', sha256='8a0ad8d61f8f6c0282c548661994a5ab83ac531bac496c3041dedc1ab021107b')
+ version('1.3.1', sha256='ded509c209f8a1d390df8a2f44be5b5c29963172b0e0f095304efb59765d0523')
+ version('1.3.0', sha256='e1af1bb767b57c3416de0d43a5f74d174c42b85231dffd36f3630173534d4307')
+ version('1.2.1', sha256='f2baf09b1a9a0500907b4d5cb5473070b3ecede06ed6e8d1096873c91922fb9e')
+ version('1.2.0', sha256='03dbf7548d1fc1c11ed58da5fa68616f795c819f868f43478cbcaa26abed374f')
+ version('1.1.0', sha256='aad4470f52fa59f54de7b9a2da727429e6755d91d756f245f952698c42a60027')
+ version('1.0.1', sha256='deea3c65e0703da96d9c3f1162e464c51d37659dd129396af134e9e8f1ea8c05')
+ version('1.0.0', sha256='db8b3b8f4134b7c9c1b4165492ad5d5bb78889fcd99ffdffc325e97da3e8c677')
+ version('0.12.0', sha256='13a1d4e98c82eae7e26fe75384de1517d6126f63ba5d302392ec02ac3ae4b1b9')
+ version('0.11.0', sha256='24242ff696234bb1e58d09d45169b148525ccb706f980a4a92ddd3b82c7546dc')
+ version('0.10.0', sha256='f32df04e8f7186aaf6723fc5396733b2f6c2fd6fe4a53a54a68b80f3ec855680')
+ version('0.9.0', sha256='3128c396af19518c642d3e590212291e1d93c5b047472a10cf3245b53adac9c9')
+ version('0.8.0', sha256='f201ba7fb7609a6416968d4e1920d87d67be693b5bc7d34b6b4a79860a9a8a4e')
+ version('0.7.1', sha256='ef34121432f7a522cf9f99a56cdd86e370cc5fa3ee31255ca7cb17f36b8dfc0d')
+ version('0.7.0', sha256='43dd3051f947aa66e6fc09dac2f86a2efe2e019736bbd091c138544b86d717ce')
+ version('0.6.0', sha256='f86ace45e99053b09749cd55ab79c57274d8c7460ae763c5e808d81ffbc3b657')
+
+ variant('mkl', default=False, description='Build with MKL support')
+ variant('jemalloc', default=False, description='Build with jemalloc as malloc support')
+ # FIXME: ~gcp does not build for 2.0.0
+ # See https://github.com/tensorflow/tensorflow/issues/34878
+ variant('gcp', default=True, description='Build with Google Cloud Platform support')
+ variant('hdfs', default=False, description='Build with Hadoop File System support')
+ variant('aws', default=False, description='Build with Amazon AWS Platform support')
+ variant('kafka', default=False, description='Build with Apache Kafka Platform support')
+ variant('ignite', default=False, description='Build with Apache Ignite support')
+ variant('xla', default=False, description='Build with XLA JIT support')
+ variant('gdr', default=False, description='Build with GDR support')
+ variant('verbs', default=False, description='Build with libverbs support')
+ variant('ngraph', default=False, description='Build with Intel nGraph support')
+ variant('opencl', default=False, description='Build with OpenCL SYCL support')
+ variant('computecpp', default=False, description='Build with ComputeCPP support')
+ variant('rocm', default=False, description='Build with ROCm support')
+ variant('tensorrt', default=False, description='Build with TensorRT support')
+ variant('cuda', default=sys.platform != 'darwin', description='Build with CUDA support')
+ variant('nccl', default=sys.platform.startswith('linux'), description='Enable NVIDIA NCCL support')
+ variant('mpi', default=False, description='Build with MPI support')
+ variant('android', default=False, description='Configure for Android builds')
+ variant('ios', default=False, description='Build with iOS support (macOS only)')
+ variant('monolithic', default=False, description='Static monolithic build')
+ variant('numa', default=False, description='Build with NUMA support')
+ variant('dynamic_kernels', default=False, description='Build kernels into separate shared objects')
+
+ extends('python')
+
+ # TODO: Older versions of TensorFlow don't list the viable version range,
+ # just the minimum version of bazel that will work. The latest version of
+ # bazel doesn't seem to work, so for now we force them to use min version.
+ # Need to investigate further.
+
+ # See _TF_MIN_BAZEL_VERSION and _TF_MAX_BAZEL_VERSION in configure.py
+ depends_on('bazel@0.27.1:0.29.1', type='build', when='@2.1:')
+ depends_on('bazel@0.24.1:0.26.1', type='build', when='@1.15:2.0')
+ # See call to check_bazel_version in configure.py
+ depends_on('bazel@0.24.1:0.25.2', type='build', when='@1.14.0')
+ depends_on('bazel@0.19.0:0.21.0', type='build', when='@1.13.0:1.13.2')
+ depends_on('bazel@0.24.1:0.25.0', type='build', when='@1.12.1')
+ depends_on('bazel@0.15.0', type='build', when='@1.10:1.12.0,1.12.2:1.12.3')
+ depends_on('bazel@0.10.0', type='build', when='@1.7:1.9')
+ # See call to check_version in tensorflow/workspace.bzl
+ depends_on('bazel@0.5.4', type='build', when='@1.4:1.6')
+ # See MIN_BAZEL_VERSION in configure
+ depends_on('bazel@0.4.5', type='build', when='@1.2:1.3')
+ # See call to check_version in WORKSPACE
+ depends_on('bazel@0.4.2', type='build', when='@1.0:1.1')
+ depends_on('bazel@0.3.2', type='build', when='@0.12.0:0.12.1')
+ depends_on('bazel@0.3.0', type='build', when='@0.11.0')
+ depends_on('bazel@0.2.0', type='build', when='@0.9:0.10')
+ depends_on('bazel@0.1.4', type='build', when='@0.7:0.8')
+ depends_on('bazel@0.1.1', type='build', when='@0.5:0.6')
+
+ depends_on('swig', type='build')
+ depends_on('py-setuptools', type='build')
+ depends_on('py-future', type='build', when='^python@:2')
+
+ # Listed under REQUIRED_PACKAGES in tensorflow/tools/pip_package/setup.py
+ depends_on('py-absl-py@0.7.0:', type=('build', 'run'), when='@1.12.1,1.14:')
+ depends_on('py-absl-py@0.1.6:', type=('build', 'run'), when='@1.5:')
+ depends_on('py-astor@0.6.0:', type=('build', 'run'), when='@1.6:')
+ depends_on('py-backports-weakref@1.0:', type=('build', 'run'), when='@1.3: ^python@:3.3')
+ depends_on('py-backports-weakref@1.0rc1', type=('build', 'run'), when='@1.2.0:1.2.1')
+ depends_on('py-enum34@1.1.6:', type=('build', 'run'), when='@1.5: ^python@:3.3')
+ depends_on('py-enum34@1.1.6:', type=('build', 'run'), when='@1.4.0:1.4.1')
+ depends_on('py-gast@0.2.2', type=('build', 'run'), when='@1.15:')
+ depends_on('py-gast@0.2.0:', type=('build', 'run'), when='@1.6:')
+ depends_on('py-google-pasta@0.1.6:', type=('build', 'run'), when='@1.14:')
+ depends_on('py-google-pasta@0.1.2:', type=('build', 'run'), when='@1.12.1')
+ depends_on('py-keras-applications@1.0.8:', type=('build', 'run'), when='@1.15:')
+ depends_on('py-keras-applications@1.0.6:', type=('build', 'run'), when='@1.12:')
+ depends_on('py-keras-applications@1.0.5:', type=('build', 'run'), when='@1.11:')
+ depends_on('py-keras-preprocessing@1.1.0:', type=('build', 'run'), when='@2.1:')
+ depends_on('py-keras-preprocessing@1.0.5:', type=('build', 'run'), when='@1.12:')
+ depends_on('py-keras-preprocessing@1.0.3:', type=('build', 'run'), when='@1.11:')
+ depends_on('py-numpy@1.16.0:1.999', type=('build', 'run'), when='@1.13.2,1.15:')
+ depends_on('py-numpy@1.14.5:1.999', type=('build', 'run'), when='@1.12.1,1.14.0')
+ depends_on('py-numpy@1.13.3:1.14.5', type=('build', 'run'), when='@1.10.0:1.10.1')
+ depends_on('py-numpy@1.13.3:', type=('build', 'run'), when='@1.6:')
+ depends_on('py-numpy@1.12.1:', type=('build', 'run'), when='@1.4:')
+ depends_on('py-numpy@1.11.0:', type=('build', 'run'), when='@0.11:')
+ depends_on('py-numpy@1.10.1:', type=('build', 'run'), when='@0.7.1: platform=darwin')
+ depends_on('py-numpy@1.8.2:', type=('build', 'run'), when='@0.6:')
+ depends_on('py-numpy@1.9.2:', type=('build', 'run'), when='@0.5.0')
+ depends_on('py-opt-einsum@2.3.2:', type=('build', 'run'), when='@1.15:')
+ depends_on('py-protobuf@3.8.0:', type=('build', 'run'), when='@2.1:')
+ depends_on('py-protobuf@3.6.1:', type=('build', 'run'), when='@1.12:')
+ depends_on('py-protobuf@3.6.0:', type=('build', 'run'), when='@1.10:')
+ depends_on('py-protobuf@3.4.0:', type=('build', 'run'), when='@1.5:')
+ depends_on('py-protobuf@3.3.0:', type=('build', 'run'), when='@1.3:')
+ depends_on('py-protobuf@3.2.0:', type=('build', 'run'), when='@1.1:')
+ depends_on('py-protobuf@3.1.0:', type=('build', 'run'), when='@0.12.1:')
+ depends_on('py-protobuf@3.1.0', type=('build', 'run'), when='@0.12.0')
+ depends_on('py-protobuf@3.0.0', type=('build', 'run'), when='@0.11.0')
+ depends_on('py-protobuf@3.0.0b2', type=('build', 'run'), when='@0.7.1:0.10')
+ depends_on('py-protobuf@3.0.0a3', type=('build', 'run'), when='@0.6:0.7.0')
+ # tensorboard
+ # tensorflow-estimator
+ depends_on('py-termcolor@1.1.0:', type=('build', 'run'), when='@1.6:')
+ depends_on('py-wrapt@1.11.1:', type=('build', 'run'), when='@1.12.1,1.14:')
+ depends_on('py-wheel', type=('build', 'run'), when='@0.6:')
+ depends_on('py-wheel@0.26:', type=('build', 'run'), when='@0.6: ^python@3:')
+ depends_on('py-mock@2.0.0:', type=('build', 'run'), when='@0.10: ^python@:2')
+ depends_on('py-functools32@3.2.3:', type=('build', 'run'), when='@1.15: ^python@:2')
+ depends_on('py-six@1.12.0:', type=('build', 'run'), when='@2.1:')
+ depends_on('py-six@1.10.0:', type=('build', 'run'), when='@:2.0')
+ depends_on('py-grpcio@1.8.6:', type=('build', 'run'), when='@1.6:1.7')
+ if sys.byteorder == 'little':
+ # Only builds correctly on little-endian machines
+ depends_on('py-grpcio@1.8.6:', type=('build', 'run'), when='@1.8:')
+
+ # Listed under TEST_PACKAGES in tensorflow/tools/pip_package/setup.py
+ depends_on('py-scipy@0.15.1:', type='test')
+
+ # TODO: add packages for some of these dependencies
+ depends_on('mkl', when='+mkl')
+ depends_on('curl', when='+gcp')
+ # depends_on('computecpp', when='+opencl+computecpp')
+ # depends_on('trisycl', when='+opencl~computepp')
+ depends_on('cudnn', when='+cuda')
+ depends_on('cudnn@6.5', when='@0.5:0.6 +cuda')
+ # depends_on('tensorrt', when='+tensorrt')
+ depends_on('nccl', when='+nccl')
+ depends_on('mpi', when='+mpi')
+ # depends_on('android-ndk@10:18', when='+android')
+ # depends_on('android-sdk', when='+android')
+
+ # Check configure and configure.py to see when these variants are supported
+ conflicts('+mkl', when='@:1.0')
+ conflicts('+mkl', when='platform=darwin', msg='Darwin is not yet supported')
+ conflicts('+jemalloc', when='@:0')
+ conflicts('+jemalloc', when='platform=darwin', msg='Currently jemalloc is only supported on Linux platform')
+ conflicts('+jemalloc', when='platform=cray', msg='Currently jemalloc is only supported on Linux platform')
+ conflicts('+jemalloc', when='platform=bgq', msg='Currently jemalloc is only supported on Linux platform')
+ conflicts('+gcp', when='@:0.8')
+ conflicts('+hdfs', when='@:0.10')
+ conflicts('+aws', when='@:1.3')
+ conflicts('+kafka', when='@:1.5')
+ conflicts('+ignite', when='@:1.11')
+ conflicts('+xla', when='@:0')
+ conflicts('+gdr', when='@:1.3')
+ conflicts('+verbs', when='@:1.1')
+ conflicts('+ngraph', when='@:1.10')
+ conflicts('+opencl', when='@:0.11')
+ conflicts('+computecpp', when='@:0.11')
+ conflicts('+computecpp', when='~opencl')
+ conflicts('+rocm', when='@:1.11')
+ conflicts('+cuda', when='platform=darwin', msg='There is no GPU support for macOS')
+ conflicts('+tensorrt', when='@:1.5')
+ conflicts('+tensorrt', when='~cuda')
+ conflicts('+tensorrt', when='platform=darwin', msg='Currently TensorRT is only supported on Linux platform')
+ conflicts('+tensorrt', when='platform=cray', msg='Currently TensorRT is only supported on Linux platform')
+ conflicts('+tensorrt', when='platform=bgq', msg='Currently TensorRT is only supported on Linux platform')
+ conflicts('+nccl', when='@:1.7')
+ conflicts('+nccl', when='~cuda')
+ conflicts('+nccl', when='platform=darwin', msg='Currently NCCL is only supported on Linux platform')
+ conflicts('+nccl', when='platform=cray', msg='Currently NCCL is only supported on Linux platform')
+ conflicts('+nccl', when='platform=bgq', msg='Currently NCCL is only supported on Linux platform')
+ conflicts('+mpi', when='@:1.2')
+ conflicts('+android', when='@:1.4')
+ conflicts('+ios', when='@:1.12.0,1.12.2:1.13')
+ conflicts('+ios', when='platform=linux', msg='iOS support only available on macOS')
+ conflicts('+ios', when='platform=cray', msg='iOS support only available on macOS')
+ conflicts('+ios', when='platform=bgq', msg='iOS support only available on macOS')
+ conflicts('+monolithic', when='@:1.3')
+ conflicts('+numa', when='@:1.12.0,1.12.2:1.13')
+ conflicts('+dynamic_kernels', when='@:1.12.0,1.12.2:1.12.3')
+
+ # TODO: why is this needed?
+ patch('url-zlib.patch', when='@0.10.0')
+ # TODO: why is this needed?
+ patch('crosstool.patch', when='@0.10.0+cuda')
+ # Avoid build error: "no such package '@io_bazel_rules_docker..."
+ patch('io_bazel_rules_docker2.patch', when='@1.15.0,2.0.0')
+ # Avoid build error: "name 'new_http_archive' is not defined"
+ patch('http_archive.patch', when='@1.12.3')
+
+ phases = ['configure', 'build', 'install']
+
+ # https://www.tensorflow.org/install/source
+ def setup_build_environment(self, env):
+ spec = self.spec
+
+ # Please specify the location of python
+ env.set('PYTHON_BIN_PATH', spec['python'].command.path)
+
+ # Please input the desired Python library path to use
+ env.set('PYTHON_LIB_PATH', site_packages_dir)
+
+ # Ensure swig is in PATH or set SWIG_PATH
+ env.set('SWIG_PATH', spec['swig'].prefix.bin.swig)
+
+ # Do you wish to build TensorFlow with MKL support?
+ if '+mkl' in spec:
+ env.set('TF_NEED_MKL', '1')
+
+ # Do you wish to download MKL LIB from the web?
+ env.set('TF_DOWNLOAD_MKL', '0')
+
+ # Please specify the location where MKL is installed
+ env.set('MKL_INSTALL_PATH', spec['mkl'].prefix)
+ else:
+ env.set('TF_NEED_MKL', '0')
+
+ # Do you wish to build TensorFlow with jemalloc as malloc support?
+ if '+jemalloc' in spec:
+ env.set('TF_NEED_JEMALLOC', '1')
+ else:
+ env.set('TF_NEED_JEMALLOC', '0')
+
+ # Do you wish to build TensorFlow with Google Cloud Platform support?
+ if '+gcp' in spec:
+ env.set('TF_NEED_GCP', '1')
+ else:
+ env.set('TF_NEED_GCP', '0')
+
+ # Do you wish to build TensorFlow with Hadoop File System support?
+ if '+hdfs' in spec:
+ env.set('TF_NEED_HDFS', '1')
+ else:
+ env.set('TF_NEED_HDFS', '0')
+
+ # Do you wish to build TensorFlow with Amazon AWS Platform support?
+ if '+aws' in spec:
+ env.set('TF_NEED_AWS', '1')
+ env.set('TF_NEED_S3', '1')
+ else:
+ env.set('TF_NEED_AWS', '0')
+ env.set('TF_NEED_S3', '0')
+
+ # Do you wish to build TensorFlow with Apache Kafka Platform support?
+ if '+kafka' in spec:
+ env.set('TF_NEED_KAFKA', '1')
+ else:
+ env.set('TF_NEED_KAFKA', '0')
+
+ # Do you wish to build TensorFlow with Apache Ignite support?
+ if '+ignite' in spec:
+ env.set('TF_NEED_IGNITE', '1')
+ else:
+ env.set('TF_NEED_IGNITE', '0')
+
+ # Do you wish to build TensorFlow with XLA JIT support?
+ if '+xla' in spec:
+ env.set('TF_ENABLE_XLA', '1')
+ else:
+ env.set('TF_ENABLE_XLA', '0')
+
+ # Do you wish to build TensorFlow with GDR support?
+ if '+gdr' in spec:
+ env.set('TF_NEED_GDR', '1')
+ else:
+ env.set('TF_NEED_GDR', '0')
+
+ # Do you wish to build TensorFlow with VERBS support?
+ if '+verbs' in spec:
+ env.set('TF_NEED_VERBS', '1')
+ else:
+ env.set('TF_NEED_VERBS', '0')
+
+ # Do you wish to build TensorFlow with nGraph support?
+ if '+ngraph' in spec:
+ env.set('TF_NEED_NGRAPH', '1')
+ else:
+ env.set('TF_NEED_NGRAPH', '0')
+
+ # Do you wish to build TensorFlow with OpenCL SYCL support?
+ if '+opencl' in spec:
+ env.set('TF_NEED_OPENCL_SYCL', '1')
+ env.set('TF_NEED_OPENCL', '1')
+
+ # Please specify which C++ compiler should be used as the host
+ # C++ compiler
+ env.set('HOST_CXX_COMPILER', spack_cxx)
+
+ # Please specify which C compiler should be used as the host
+ # C compiler
+ env.set('HOST_C_COMPILER', spack_cc)
+
+ # Do you wish to build TensorFlow with ComputeCPP support?
+ if '+computecpp' in spec:
+ env.set('TF_NEED_COMPUTECPP', '1')
+
+ # Please specify the location where ComputeCpp is installed
+ env.set('COMPUTECPP_TOOLKIT_PATH', spec['computecpp'].prefix)
+ else:
+ env.set('TF_NEED_COMPUTECPP', '0')
+
+ # Please specify the location of the triSYCL include directory
+ env.set('TRISYCL_INCLUDE_DIR', spec['trisycl'].prefix.include)
+ else:
+ env.set('TF_NEED_OPENCL_SYCL', '0')
+ env.set('TF_NEED_OPENCL', '0')
+
+ # Do you wish to build TensorFlow with ROCm support?
+ if '+rocm' in spec:
+ env.set('TF_NEED_ROCM', '1')
+ else:
+ env.set('TF_NEED_ROCM', '0')
+
+ # Do you wish to build TensorFlow with CUDA support?
+ if '+cuda' in spec:
+ env.set('TF_NEED_CUDA', '1')
+
+ # Do you want to use clang as CUDA compiler?
+ env.set('TF_CUDA_CLANG', '0')
+
+ # Please specify which gcc nvcc should use as the host compiler
+ env.set('GCC_HOST_COMPILER_PATH', spack_cc)
+
+ cuda_paths = [
+ spec['cuda'].prefix,
+ spec['cudnn'].prefix,
+ ]
+
+ # Do you wish to build TensorFlow with TensorRT support?
+ if '+tensorrt' in spec:
+ env.set('TF_NEED_TENSORRT', '1')
+
+ cuda_paths.append(spec['tensorrt'].prefix)
+
+ # Please specify the TensorRT version you want to use
+ env.set('TF_TENSORRT_VERSION',
+ spec['tensorrt'].version.up_to(1))
+
+ # Please specify the location where TensorRT is installed
+ env.set('TENSORRT_INSTALL_PATH', spec['tensorrt'].prefix)
+ else:
+ env.set('TF_NEED_TENSORRT', '0')
+ env.unset('TF_TENSORRT_VERSION')
+
+ # Please specify the CUDA SDK version you want to use
+ env.set('TF_CUDA_VERSION', spec['cuda'].version.up_to(2))
+
+ # Please specify the cuDNN version you want to use
+ env.set('TF_CUDNN_VERSION', spec['cudnn'].version.up_to(1))
+
+ if '+nccl' in spec:
+ cuda_paths.append(spec['nccl'].prefix)
+
+ # Please specify the locally installed NCCL version to use
+ env.set('TF_NCCL_VERSION', spec['nccl'].version.up_to(1))
+
+ # Please specify the location where NCCL is installed
+ env.set('NCCL_INSTALL_PATH', spec['nccl'].prefix)
+ env.set('NCCL_HDR_PATH', spec['nccl'].prefix.include)
+ else:
+ env.unset('TF_NCCL_VERSION')
+
+ # Please specify the comma-separated list of base paths to
+ # look for CUDA libraries and headers
+ env.set('TF_CUDA_PATHS', ','.join(cuda_paths))
+
+ # Please specify the location where CUDA toolkit is installed
+ env.set('CUDA_TOOLKIT_PATH', spec['cuda'].prefix)
+
+ # Please specify the location where CUDNN library is installed
+ env.set('CUDNN_INSTALL_PATH', spec['cudnn'].prefix)
+
+ # Please specify a list of comma-separated CUDA compute
+ # capabilities you want to build with. You can find the compute
+ # capability of your device at:
+ # https://developer.nvidia.com/cuda-gpus.
+ # Please note that each additional compute capability significantly
+ # increases your build time and binary size, and that TensorFlow
+ # only supports compute capabilities >= 3.5
+ if spec.variants['cuda_arch'].value != 'none':
+ capabilities = ','.join('{0:.1f}'.format(
+ float(i) / 10.0) for i in spec.variants['cuda_arch'].value)
+ env.set('TF_CUDA_COMPUTE_CAPABILITIES', capabilities)
+ else:
+ env.set('TF_NEED_CUDA', '0')
+
+ # Do you wish to download a fresh release of clang? (Experimental)
+ env.set('TF_DOWNLOAD_CLANG', '0')
+
+ # Do you wish to build TensorFlow with MPI support?
+ if '+mpi' in spec:
+ env.set('TF_NEED_MPI', '1')
+
+ # Please specify the MPI toolkit folder
+ env.set('MPI_HOME', spec['mpi'].prefix)
+ else:
+ env.set('TF_NEED_MPI', '0')
+ env.unset('MPI_HOME')
+
+ # Please specify optimization flags to use during compilation when
+ # bazel option '--config=opt' is specified
+ env.set('CC_OPT_FLAGS', spec.target.optimization_flags(
+ spec.compiler.name, spec.compiler.version))
+
+ # Would you like to interactively configure ./WORKSPACE for
+ # Android builds?
+ if '+android' in spec:
+ env.set('TF_SET_ANDROID_WORKSPACE', '1')
+
+ # Please specify the home path of the Android NDK to use
+ env.set('ANDROID_NDK_HOME', spec['android-ndk'].prefix)
+ env.set('ANDROID_NDK_API_LEVEL', spec['android-ndk'].version)
+
+ # Please specify the home path of the Android SDK to use
+ env.set('ANDROID_SDK_HOME', spec['android-sdk'].prefix)
+ env.set('ANDROID_SDK_API_LEVEL', spec['android-sdk'].version)
+
+ # Please specify the Android SDK API level to use
+ env.set('ANDROID_API_LEVEL', spec['android-sdk'].version)
+
+ # Please specify an Android build tools version to use
+ env.set('ANDROID_BUILD_TOOLS_VERSION', spec['android-sdk'].version)
+ else:
+ env.set('TF_SET_ANDROID_WORKSPACE', '0')
+
+ # Do you wish to build TensorFlow with iOS support?
+ if '+ios' in spec:
+ env.set('TF_CONFIGURE_IOS', '1')
+ else:
+ env.set('TF_CONFIGURE_IOS', '0')
+
+ # set tmpdir to a non-NFS filesystem
+ # (because bazel uses ~/.cache/bazel)
+ # TODO: This should be checked for non-nfsy filesystem, but the current
+ # best idea for it is to check
+ # subprocess.call([
+ # 'stat', '--file-system', '--format=%T', tmp_path
+ # ])
+ # to not be nfs. This is only valid for Linux and we'd like to
+ # stay at least also OSX compatible
+ tmp_path = '/tmp/spack/tf'
+ mkdirp(tmp_path)
+ env.set('TEST_TMPDIR', tmp_path)
+ # TODO: Is setting this necessary? It breaks `spack build-env`
+ # because Bash can't find my .bashrc
+ env.set('HOME', tmp_path)
+
+ def configure(self, spec, prefix):
+ # NOTE: configure script is interactive. If you set the appropriate
+ # environment variables, this interactivity is skipped. If you don't,
+ # Spack hangs during the configure phase. Use `spack build-env` to
+ # determine which environment variables must be set for a particular
+ # version.
+ configure()
+
+ @run_after('configure')
+ def post_configure_fixes(self):
+ spec = self.spec
+ if spec.satisfies('@1.5.0: ~android'):
+ # env variable is somehow ignored -> brute force
+ # TODO: find a better solution
+ filter_file(r'if workspace_has_any_android_rule\(\)',
+ r'if True',
+ 'configure.py')
+
+ # version dependent fixes
+ if spec.satisfies('@1.3.0:1.5.0'):
+ # checksum for protobuf that bazel downloads (@github) changed
+ filter_file(r'sha256 = "6d43b9d223ce09e5d4ce8b0060cb8a7513577a35a64c7e3dad10f0703bf3ad93"',
+ r'sha256 = "e5fdeee6b28cf6c38d61243adff06628baa434a22b5ebb7432d2a7fbabbdb13d"',
+ 'tensorflow/workspace.bzl')
+ # starting with tensorflow 1.3, tensorboard becomes a dependency
+ # (...but is not really needed? Tensorboard should depend on
+ # tensorflow, not the other way!)
+ # -> remove from list of required packages
+ filter_file(r"'tensorflow-tensorboard",
+ r"#'tensorflow-tensorboard",
+ 'tensorflow/tools/pip_package/setup.py')
+ if spec.satisfies('@1.5.0: ~gcp'):
+ # google cloud support seems to be installed on default, leading
+ # to boringssl error manually set the flag to false to avoid
+ # installing gcp support
+ # https://github.com/tensorflow/tensorflow/issues/20677#issuecomment-404634519
+ filter_file(r'--define with_gcp_support=true',
+ r'--define with_gcp_support=false',
+ '.tf_configure.bazelrc')
+ if spec.satisfies('@1.6.0:'):
+ # tensorboard name changed
+ filter_file(r"'tensorboard >=",
+ r"#'tensorboard >=",
+ 'tensorflow/tools/pip_package/setup.py')
+ if spec.satisfies('@1.8.0: ~opencl'):
+ # 1.8.0 and 1.9.0 aborts with numpy import error during python_api
+ # generation somehow the wrong PYTHONPATH is used...
+ # set --distinct_host_configuration=false as a workaround
+ # https://github.com/tensorflow/tensorflow/issues/22395#issuecomment-431229451
+ filter_file('build --action_env TF_NEED_OPENCL_SYCL="0"',
+ 'build --action_env TF_NEED_OPENCL_SYCL="0"\n'
+ 'build --distinct_host_configuration=false\n'
+ 'build --action_env PYTHONPATH="{0}"'.format(
+ env['PYTHONPATH']),
+ '.tf_configure.bazelrc')
+ if spec.satisfies('@1.13.1'):
+ # tensorflow_estimator is an API for tensorflow
+ # tensorflow-estimator imports tensorflow during build, so
+ # tensorflow has to be set up first
+ filter_file(r"'tensorflow_estimator >=",
+ r"#'tensorflow_estimator >=",
+ 'tensorflow/tools/pip_package/setup.py')
+ if spec.satisfies('@2.0.0:'):
+ # now it depends on the nightly versions...
+ filter_file(r"'tf-estimator-nightly >=",
+ r"#'tf-estimator-nightly >=",
+ 'tensorflow/tools/pip_package/setup.py')
+ filter_file(r"REQUIRED_PACKAGES\[i\] = 'tb-nightly >=",
+ r"pass #REQUIRED_PACKAGES\[i\] = 'tb-nightly >=",
+ 'tensorflow/tools/pip_package/setup.py')
+ filter_file(r"'tb-nightly >=",
+ r"#'tb-nightly >=",
+ 'tensorflow/tools/pip_package/setup.py')
+
+ if spec.satisfies('@1.13.1 +nccl'):
+ filter_file(
+ r'^build --action_env NCCL_INSTALL_PATH=.*',
+ r'build --action_env NCCL_INSTALL_PATH="' +
+ spec['nccl'].libs.directories[0] + '"',
+ '.tf_configure.bazelrc')
+ filter_file(
+ r'^build --action_env NCCL_HDR_PATH=.*',
+ r'build --action_env NCCL_HDR_PATH="' +
+ spec['nccl'].prefix.include + '"',
+ '.tf_configure.bazelrc')
+
+ if spec.satisfies('+cuda'):
+ libs = spec['cuda'].libs.directories
+ libs.extend(spec['cudnn'].libs.directories)
+ if '+nccl' in spec:
+ libs.extend(spec['nccl'].libs.directories)
+ if '+tensorrt' in spec:
+ libs.extend(spec['tensorrt'].libs.directories)
+ slibs = ':'.join(libs)
+
+ filter_file('build --action_env TF_NEED_OPENCL_SYCL="0"',
+ 'build --action_env TF_NEED_OPENCL_SYCL="0"\n'
+ 'build --action_env LD_LIBRARY_PATH="' + slibs + '"',
+ '.tf_configure.bazelrc')
+
+ def build(self, spec, prefix):
+ # https://docs.bazel.build/versions/master/command-line-reference.html
+ args = [
+ # Don't allow user or system .bazelrc to override build settings
+ '--nohome_rc',
+ '--nosystem_rc',
+ 'build',
+ # Spack logs don't handle colored output well
+ '--color=no',
+ '--jobs={0}'.format(make_jobs),
+ '--config=opt',
+ ]
+
+ # See .bazelrc for when each config flag is supported
+ if spec.satisfies('@1.12.1:'):
+ if '+mkl' in spec:
+ args.append('--config=mkl')
+
+ if '+monolithic' in spec:
+ args.append('--config=monolithic')
+
+ if '+gdr' in spec:
+ args.append('--config=gdr')
+
+ if '+verbs' in spec:
+ args.append('--config=verbs')
+
+ if '+ngraph' in spec:
+ args.append('--config=ngraph')
+
+ if '+dynamic_kernels' in spec:
+ args.append('--config=dynamic_kernels')
+
+ if '+cuda' in spec:
+ args.append('--config=cuda')
+
+ if '~aws' in spec:
+ args.append('--config=noaws')
+
+ if '~gcp' in spec:
+ args.append('--config=nogcp')
+
+ if '~hdfs' in spec:
+ args.append('--config=nohdfs')
+
+ if '~ignite' in spec:
+ args.append('--config=noignite')
+
+ if '~kafka' in spec:
+ args.append('--config=nokafka')
+
+ if '~nccl' in spec:
+ args.append('--config=nonccl')
+
+ if spec.satisfies('@1.12.1,1.14:'):
+ if '+numa' in spec:
+ args.append('--config=numa')
+
+ if spec.satisfies('@2:'):
+ args.append('--config=v2')
+
+ if spec.satisfies('%gcc@5:'):
+ args.append('--cxxopt=-D_GLIBCXX_USE_CXX11_ABI=0')
+
+ if spec.satisfies('@2.1:'):
+ # TODO: is this needed?
+ args.append('--define=tensorflow_mkldnn_contraction_kernel=0')
+
+ args.append('//tensorflow/tools/pip_package:build_pip_package')
+
+ bazel(*args)
+
+ build_pip_package = Executable(
+ 'bazel-bin/tensorflow/tools/pip_package/build_pip_package')
+ tmp_path = env['TEST_TMPDIR']
+ build_pip_package(tmp_path)
+
+ def install(self, spec, prefix):
+ with working_dir('spack-build', create=True):
+ for fn in glob.iglob(join_path(
+ '../bazel-bin/tensorflow/tools/pip_package',
+ 'build_pip_package.runfiles/org_tensorflow/*')):
+ dst = os.path.basename(fn)
+ if not os.path.exists(dst):
+ os.symlink(fn, dst)
+ for fn in glob.iglob('../tensorflow/tools/pip_package/*'):
+ dst = os.path.basename(fn)
+ if not os.path.exists(dst):
+ os.symlink(fn, dst)
+
+ # macOS is case-insensitive, and BUILD file in directory
+ # containing setup.py causes the following error message:
+ # error: could not create 'build': File exists
+ # Delete BUILD file to prevent this.
+ os.remove('BUILD')
+
+ setup_py('install', '--prefix={0}'.format(prefix),
+ '--single-version-externally-managed', '--root=/')
+
+ @run_after('install')
+ @on_package_attributes(run_tests=True)
+ def import_module_test(self):
+ with working_dir('spack-test', create=True):
+ for module in self.import_modules:
+ python('-c', 'import {0}'.format(module))
diff --git a/var/spack/repos/builtin/packages/py-tensorflow/url-zlib.patch b/var/spack/repos/builtin/packages/py-tensorflow/url-zlib.patch
new file mode 100644
index 0000000000..9c4fdba09c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-tensorflow/url-zlib.patch
@@ -0,0 +1,13 @@
+--- tensorflow-0.10.0/tensorflow/workspace.bzl 2016-09-08 23:49:36.000000000 +0200
++++ tensorflow-0.10.0/tensorflow/workspace.bzl 2017-01-31 09:11:07.391869277 +0100
+@@ -198,7 +198,7 @@ def tf_workspace(path_prefix = "", tf_re
+
+ native.new_http_archive(
+ name = "zlib_archive",
+- url = "http://zlib.net/zlib-1.2.8.tar.gz",
+- sha256 = "36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d",
++ url = "http://zlib.net/zlib-1.2.11.tar.gz",
++ sha256 = "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1",
+ build_file = path_prefix + "zlib.BUILD",
+ )
+
diff --git a/var/spack/repos/builtin/packages/py-wrapt/package.py b/var/spack/repos/builtin/packages/py-wrapt/package.py
index 7e3bb32f98..887df80da1 100644
--- a/var/spack/repos/builtin/packages/py-wrapt/package.py
+++ b/var/spack/repos/builtin/packages/py-wrapt/package.py
@@ -10,6 +10,7 @@ class PyWrapt(PythonPackage):
"""Module for decorators, wrappers and monkey patching."""
homepage = "https://github.com/GrahamDumpleton/wrapt"
- url = "https://pypi.io/packages/source/w/wrapt/wrapt-1.10.10.tar.gz"
+ url = "https://pypi.io/packages/source/w/wrapt/wrapt-1.11.2.tar.gz"
+ version('1.11.2', sha256='565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1')
version('1.10.10', sha256='42160c91b77f1bc64a955890038e02f2f72986c01d462d53cb6cb039b995cdd9')