summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--system/bc/APKBUILD12
-rw-r--r--user/alpine/APKBUILD46
-rw-r--r--user/alpine/support-musl.patch20
-rw-r--r--user/dosbox/APKBUILD6
-rw-r--r--user/dosbox/gcc8-pmmx.patch32
-rw-r--r--user/llvm7/APKBUILD2
-rw-r--r--user/llvm7/more-secure-plt.patch13
-rw-r--r--user/node/APKBUILD71
-rw-r--r--user/node/gyp-python3.patch3862
-rw-r--r--user/node/libatomic.patch14
-rw-r--r--user/node/ppc32.patch18
-rw-r--r--user/node/ppc64.patch40
-rw-r--r--user/node/python3.patch163
-rw-r--r--user/node/stack-silliness.patch14
-rw-r--r--user/node/v8-python3.patch169
-rw-r--r--user/nsd/APKBUILD4
-rw-r--r--user/py3-pycairo/APKBUILD8
-rw-r--r--user/py3-pycairo/meson-idiocy.patch27
-rw-r--r--user/rust/0001-Don-t-pass-CFLAGS-to-the-C-compiler.patch12
-rw-r--r--user/rust/0002-Fix-LLVM-build.patch12
-rw-r--r--user/rust/0003-Allow-rustdoc-to-work-when-cross-compiling-on-musl.patch12
-rw-r--r--user/rust/0004-Require-static-native-libraries-when-linking-static-.patch12
-rw-r--r--user/rust/0005-Remove-nostdlib-and-musl_root-from-musl-targets.patch204
-rw-r--r--user/rust/0006-Prefer-libgcc_eh-over-libunwind-for-musl.patch10
-rw-r--r--user/rust/0007-Add-powerpc-unknown-linux-musl-target.patch66
-rw-r--r--user/rust/0007-runtest-Fix-proc-macro-tests-on-musl-hosts.patch (renamed from user/rust/0012-runtest-Fix-proc-macro-tests-on-musl-hosts.patch)12
-rw-r--r--user/rust/0008-Fix-powerpc64-ELFv2-big-endian-struct-passing-ABI.patch79
-rw-r--r--user/rust/0008-test-enum-debug-Correct-minimum-LLVM-version.patch67
-rw-r--r--user/rust/0009-Use-the-ELFv2-ABI-on-powerpc64-musl.patch50
-rw-r--r--user/rust/0009-test-use-extern-for-plugins-Don-t-assume-multilib.patch (renamed from user/rust/0018-test-use-extern-for-plugins-Don-t-assume-multilib.patch)6
-rw-r--r--user/rust/0010-Add-powerpc64-unknown-linux-musl-target.patch67
-rw-r--r--user/rust/0010-test-sysroot-crates-are-unstable-Fix-test-when-rpath.patch (renamed from user/rust/0019-test-sysroot-crates-are-unstable-Fix-test-when-rpath.patch)6
-rw-r--r--user/rust/0011-Ignore-broken-and-non-applicable-tests.patch (renamed from user/rust/0020-Ignore-broken-and-non-applicable-tests.patch)26
-rw-r--r--user/rust/0011-rustc_data_structures-use-libc-types-constants-in-fl.patch209
-rw-r--r--user/rust/0012-Link-stage-2-tools-dynamically-to-libstd.patch (renamed from user/rust/0021-Link-stage-2-tools-dynamically-to-libstd.patch)10
-rw-r--r--user/rust/0013-Fix-double_check-tests-on-big-endian-targets.patch75
-rw-r--r--user/rust/0013-Move-debugger-scripts-to-usr-share-rust.patch (renamed from user/rust/0022-Move-debugger-scripts-to-usr-share-rust.patch)18
-rw-r--r--user/rust/0014-Add-foxkit-target-specs.patch (renamed from user/rust/0023-Add-foxkit-target-specs.patch)22
-rw-r--r--user/rust/0014-test-invalid_const_promotion-Accept-SIGTRAP-as-a-val.patch25
-rw-r--r--user/rust/0015-test-linkage-visibility-Ensure-symbols-are-visible-t.patch30
-rw-r--r--user/rust/0016-x.py-Use-python3-instead-of-python.patch22
-rw-r--r--user/rust/0017-test-target-feature-gate-Only-run-on-relevant-target.patch37
-rw-r--r--user/rust/0030-libc-linkage.patch23
-rw-r--r--user/rust/0030-liblibc-linkage.patch17
-rw-r--r--user/rust/0031-liblibc-1b130d4c349d.patch126
-rw-r--r--user/rust/APKBUILD94
-rw-r--r--user/wireguard-module-power8-64k/APKBUILD4
-rw-r--r--user/wireguard-module-power8/APKBUILD4
-rw-r--r--user/wireguard-module/APKBUILD4
-rw-r--r--user/wireguard-tools/APKBUILD4
-rw-r--r--user/wpa_supplicant/APKBUILD27
51 files changed, 4818 insertions, 1095 deletions
diff --git a/system/bc/APKBUILD b/system/bc/APKBUILD
index d234fa61f..0109fab0e 100644
--- a/system/bc/APKBUILD
+++ b/system/bc/APKBUILD
@@ -1,20 +1,20 @@
# Contributor: A. Wilcox <awilfox@adelielinux.org>
# Maintainer: A. Wilcox <awilfox@adelielinux.org>
pkgname=bc
-pkgver=1.1.4
+pkgver=1.2.3
pkgrel=0
pkgdesc="An arbitrary precision numeric processing language (calculator)"
url="https://github.com/gavinhoward/bc"
arch="all"
-license="BSD-0-Clause"
+license="BSD-2-Clause"
depends=""
makedepends=""
-subpackages="$pkgname-doc"
+subpackages="$pkgname-doc $pkgname-lang"
source="https://github.com/gavinhoward/bc/releases/download/${pkgver/_/-}/bc-${pkgver/_/-}.tar.xz"
builddir="$srcdir"/$pkgname-${pkgver/_/-}
build() {
- PREFIX="/usr" ./configure.sh -g -G -O3
+ PREFIX="/usr" DESTDIR="$pkgdir" ./configure.sh -g -G -O3
make
}
@@ -23,7 +23,7 @@ check() {
}
package() {
- make PREFIX="/usr" DESTDIR="$pkgdir" install
+ make install
}
-sha512sums="fa67325cc3cb5df7513e6d0ae74d3476d7d9e87722db2f24d0cf0781622f02ec99e6ab27d3e2d57866830dd18dc43eb3c52d460be6c6ec0260ce2bad7765d7aa bc-1.1.4.tar.xz"
+sha512sums="5277177a0627b00d8022f37060f6496312ae0f25e41521b5d7276c0e177f1ee7605f148b0bdee1f526fbc5d00e36a2c1ecbf4f808978e6f55e2745f327bd3bdd bc-1.2.3.tar.xz"
diff --git a/user/alpine/APKBUILD b/user/alpine/APKBUILD
new file mode 100644
index 000000000..189057dea
--- /dev/null
+++ b/user/alpine/APKBUILD
@@ -0,0 +1,46 @@
+# Contributor: Kiyoshi Aman <kiyoshi.aman+adelie@gmail.com>
+# Maintainer: Kiyoshi Aman <kiyoshi.aman+adelie@gmail.com>
+pkgname=alpine
+pkgver=2.21
+pkgrel=0
+pkgdesc="Terminal-based email client"
+url="http://alpine.x10host.com/alpine/"
+arch="all"
+license="Apache-2.0"
+depends=""
+makedepends="aspell-dev krb5-dev linux-pam-dev ncurses-dev openldap-dev
+ openssl-dev"
+subpackages="$pkgname-doc"
+source="http://alpine.x10host.com/alpine/release/src/alpine-$pkgver.tar.xz
+ support-musl.patch"
+
+prepare() {
+ cd "$builddir"
+ default_prepare
+ autoreconf -vif
+}
+
+build() {
+ cd "$builddir"
+ ./configure \
+ --build=$CBUILD \
+ --host=$CHOST \
+ --prefix=/usr \
+ --sysconfdir=/etc \
+ --mandir=/usr/share/man \
+ --localstatedir=/var
+ make
+}
+
+check() {
+ cd "$builddir"
+ make check
+}
+
+package() {
+ cd "$builddir"
+ make DESTDIR="$pkgdir" install
+}
+
+sha512sums="a2a36a033c8af79810816a7da7185c269808ba6d84d013691fd8b3764c63f5fb2284e6844ec5a5e99d168514ae636debf59fae962533a2916679e4e9109c6264 alpine-2.21.tar.xz
+cdf827b5947fd14b6a6c1ad86df4ff1b8be1ffd51279aaa02376b2b60f3de742c54364e18d73d9a75c5c87ed3c8668a3f0c9eb59733f8da195e85833b4364782 support-musl.patch"
diff --git a/user/alpine/support-musl.patch b/user/alpine/support-musl.patch
new file mode 100644
index 000000000..ab3c2d857
--- /dev/null
+++ b/user/alpine/support-musl.patch
@@ -0,0 +1,20 @@
+--- ./configure.ac.orig
++++ ./configure.ac
+@@ -723,7 +723,7 @@
+ else
+ dnl preload c-client default locations/options
+ case $host in
+- *-linux-gnu*|*-k*bsd*-gnu*|*-gnu*)
++ *-linux-gnu*|*-k*bsd*-gnu*|*-gnu*|*-linux-musl*)
+ if test -f /etc/fedora-release -o -f /etc/redhat-release -o -f /etc/redhat_version ; then
+ alpine_SSLTYPE="nopwd"
+ if test -d /etc/pki/tls ; then
+@@ -1059,7 +1059,7 @@
+ dnl build from c-client. Most of this will go away when c-client
+ dnl adopts configure
+ case "$host" in
+- *-linux-gnu*|*-k*bsd*-gnu*|*-gnu*)
++ *-linux-gnu*|*-k*bsd*-gnu*|*-gnu*|*-linux-musl*)
+ alpine_path_delim="/"
+ alpine_mode_readonly="(0600)"
+ if test -f /etc/fedora-release ; then
diff --git a/user/dosbox/APKBUILD b/user/dosbox/APKBUILD
index 0b9418d50..e8674b531 100644
--- a/user/dosbox/APKBUILD
+++ b/user/dosbox/APKBUILD
@@ -2,7 +2,7 @@
# Maintainer: Horst Burkhardt <horst@adelielinux.org>
pkgname=dosbox
pkgver=0.82.7
-pkgrel=1
+pkgrel=2
pkgdesc="Emulator for MS-DOS games"
url="https://github.com/joncampbell123/dosbox-x/"
arch="all !s390x"
@@ -15,6 +15,7 @@ source="https://github.com/joncampbell123/dosbox-x/archive/dosbox-x-wip-20180513
asmfix.patch
posix-headers.patch
constness.patch
+ gcc8-pmmx.patch
"
builddir="$srcdir/dosbox-x-dosbox-x-wip-20180513-1316"
@@ -40,4 +41,5 @@ sha512sums="38b7423b695f3b0dfd26f8c8913f26afba73ea6235bb205ec3a5d7f08ef3c74a9b6c
daf0efea03d6295f5a20d8d197f7d0ba38e0608edcfe8be19fc6091b783885d523557674f0f7df83b88186b77794723106cf7a0e02125d2cc75ecfd7d51fa91d porttalk-fix.patch
266ede57d21030f7752287f18660b47e9185a9aef9d022337dded9c9ce93e572691b41aca878f8a28c67bf5d217c7e84912336c21aacf594085dc31d99862ebf asmfix.patch
462e84d5bac8fb2c1317b303d1bdda72a50e9eedfb5251450cb8d0f98f4297d6a483eb2c95624cbc8d25e917788032dd1e7d8b0d06d82dad6600f8bd6eec85f4 posix-headers.patch
-775d8f209ea04a81fda5c3dcf8186b529248befb962019c66e60344a5e07c95462f9cc4acf0f82c44635fc86f1b4817030792a1900d36e6d911b6f782722f320 constness.patch"
+775d8f209ea04a81fda5c3dcf8186b529248befb962019c66e60344a5e07c95462f9cc4acf0f82c44635fc86f1b4817030792a1900d36e6d911b6f782722f320 constness.patch
+0e23ccef2c238583b1fbbf598da95f6e5f805edef37bd08d0d0c4a69e92f11b935b94dd3a8d889dc9587ec264b8540169b2858116f20d8467c345a179893090d gcc8-pmmx.patch"
diff --git a/user/dosbox/gcc8-pmmx.patch b/user/dosbox/gcc8-pmmx.patch
new file mode 100644
index 000000000..3b86802ed
--- /dev/null
+++ b/user/dosbox/gcc8-pmmx.patch
@@ -0,0 +1,32 @@
+From 771f1c8304253fcb95aef54da739f43712aea726 Mon Sep 17 00:00:00 2001
+From: Jonathan Campbell <jonathan@castus.tv>
+Date: Sun, 20 May 2018 19:30:58 -0700
+Subject: [PATCH] Suddenly MinGW GCC 7.3.0 does not allow the dynamic core to
+ declare EBP clobbered. Fix dynamic core entry appropriately
+
+---
+ src/cpu/core_dyn_x86/risc_x86.h | 4 +++-
+ 1 file changed, 3 insertions(+), 1 deletion(-)
+
+diff --git a/src/cpu/core_dyn_x86/risc_x86.h b/src/cpu/core_dyn_x86/risc_x86.h
+index dea5ae0ca..d2a847a4f 100644
+--- a/src/cpu/core_dyn_x86/risc_x86.h
++++ b/src/cpu/core_dyn_x86/risc_x86.h
+@@ -130,13 +130,15 @@ static BlockReturn gen_runcode(Bit8u * code) {
+ #else
+ register Bit32u tempflags=reg_flags & FMASK_TEST;
+ __asm__ volatile (
++ "pushl %%ebp \n"
+- "call 1f ; 1: addl $run_return_adress-.,(%%esp) \n"
++ "pushl $(run_return_adress) \n"
+ "pushl %2 \n"
+ "jmp *%3 \n"
+ "run_return_adress: \n"
++ "popl %%ebp \n"
+ :"=a" (retval), "=c" (tempflags)
+ :"r" (tempflags),"r" (code)
+- :"%edx","%ebx","%edi","%esi","%ebp","cc","memory"
++ :"%edx","%ebx","%edi","%esi"/*,"%ebp"*/,"cc","memory" /* NTS: GCC 7.3.0 MinGW suddenly will not allow this code to declare EBP clobbered */
+ );
+ reg_flags=(reg_flags & ~FMASK_TEST) | (tempflags & FMASK_TEST);
+ #endif
diff --git a/user/llvm7/APKBUILD b/user/llvm7/APKBUILD
index 53cf78e63..d4b78cef0 100644
--- a/user/llvm7/APKBUILD
+++ b/user/llvm7/APKBUILD
@@ -246,7 +246,7 @@ f84cd65d7042e89826ba6e8d48c4c302bf4980da369d7f19a55f217e51c00ca8ed178d453df3a3ce
49c47f125014b60d0ea7870f981a2c1708ad705793f89287ed846ee881a837a4dc0170bf467e03f2ef56177473128945287749ac80dc2d13cfabcf8b929ba58a disable-FileSystemTest.CreateDir-perms-assert.patch
caeec8e4dbd92f5f74940780b69075f3879a267a8623822cbdc193fd14706eb089071e3a5a20d60cc2eca59e4c5b2a61d29827a2f3362ee7c5f74f11d9ace200 disable-dlclose-test.patch
e5ddbc4b6c4928e79846dc3c022eb7928aaa8fed40515c78f5f03b8ab8264f34f1eb8aa8bfc0f436450932f4917e54ad261603032092ea271d9590f11a37cf1e musl-ppc64-elfv2.patch
-8c0e2a08f6b503efb6673af4cb475ed788b288e016881eacb314a74b9cdd1a920853b219f1cdf1c20e67dec9fcceedfa37e726820b28cd0454302397188aac2f more-secure-plt.patch
+dcd34b81c2f2843cd05b2e3b88e268045a2ec6ed8677630fc8904f38b2131cbc0ba37dde461e28b1c301d5e95b634b2e225797f8a5585c0cabc69e3b7e78de85 more-secure-plt.patch
deb71762721ebc73bfdf23143b582f40c70eddcef3e337ed14499e8e336bee2906292d38d64fe98fa633430c1bcb66cf6a2e067258c8fbe6e931f99f6d10a6f7 even-more-secure-plt.patch
c3f596a1578a07ce0ee40c4e2576fe05ca6ca0c1b4f94b1f74c55cb09603afe7c846db9294fe28d83ca48633086bad422218e6d06e0d92173143fb298e06fb38 ppc32-calling-convention.patch
53cc0d13dd871e9b775bb4e7567de4f9a97d91b8246cd7ce74607fd88d6e3e2ab9455f5b4195bc7f9dbdedbc77d659d43e98ec0b7cd78cd395aaea6919510287 python3-test.patch"
diff --git a/user/llvm7/more-secure-plt.patch b/user/llvm7/more-secure-plt.patch
index 64920c74c..1a32eea4f 100644
--- a/user/llvm7/more-secure-plt.patch
+++ b/user/llvm7/more-secure-plt.patch
@@ -1,3 +1,16 @@
+--- a/lib/Target/PowerPC/PPCSubtarget.cpp
++++ b/lib/Target/PowerPC/PPCSubtarget.cpp
+@@ -138,6 +138,10 @@
+ if (isDarwin())
+ HasLazyResolverStubs = true;
+
++ // Set up musl-specific properties.
++ if (TargetTriple.getEnvironment() == Triple::Musl)
++ SecurePlt = true;
++
+ if (HasSPE && IsPPC64)
+ report_fatal_error( "SPE is only supported for 32-bit targets.\n", false);
+ if (HasSPE && (HasAltivec || HasQPX || HasVSX || HasFPU))
diff --git a/lib/Target/PowerPC/PPCTargetMachine.cpp b/lib/Target/PowerPC/PPCTargetMachine.cpp
index c583fba8cab..6a9eedf89c5 100644
--- a/lib/Target/PowerPC/PPCTargetMachine.cpp
diff --git a/user/node/APKBUILD b/user/node/APKBUILD
new file mode 100644
index 000000000..ba3ff6807
--- /dev/null
+++ b/user/node/APKBUILD
@@ -0,0 +1,71 @@
+# Contributor: A. Wilcox <awilfox@adelielinux.org>
+# Maintainer: A. Wilcox <awilfox@adelielinux.org>
+pkgname=node
+pkgver=10.15.3
+pkgrel=0
+pkgdesc="JavaScript runtime"
+url="https://nodejs.org/"
+arch="all"
+license="MIT AND ICU AND BSD-3-Clause AND BSD-2-Clause AND ISC AND OpenSSL AND Public-Domain AND Zlib AND Artistic-2.0 AND Apache-2.0 AND CC0-1.0"
+depends=""
+makedepends="c-ares-dev http-parser-dev icu-dev libexecinfo-dev libuv-dev
+ nghttp2-dev python3 zlib-dev"
+subpackages="$pkgname-dev $pkgname-doc"
+source="https://nodejs.org/download/release/v$pkgver/node-v$pkgver.tar.xz
+ https://www.python.org/ftp/python/2.7.15/Python-2.7.15.tar.xz
+ libatomic.patch
+ ppc32.patch
+ ppc64.patch
+ stack-silliness.patch
+ "
+builddir="$srcdir/$pkgname-v$pkgver"
+
+unpack() {
+ default_unpack
+ [ -z $SKIP_PYTHON ] || return 0
+
+ msg "Killing all remaining hope for humanity and building Python 2..."
+ cd "$srcdir/Python-2.7.15"
+ [ -d ../python ] && rm -r ../python
+ # 19:39 <+solar> just make the firefox build process build its own py2 copy
+ # 20:03 <calvin> TheWilfox: there's always violence
+ ./configure --prefix="$srcdir/python"
+ make -j $JOBS
+ make -j $JOBS install
+}
+
+build() {
+ cd "$builddir"
+ export PATH="$srcdir/python/bin:$PATH"
+ # We can't use --shared-openssl until 1.1 is available.
+ python ./configure.py \
+ --prefix=/usr \
+ --with-intl=system-icu \
+ --shared-cares \
+ --shared-http-parser \
+ --shared-libuv \
+ --shared-nghttp2 \
+ --openssl-no-asm \
+ --shared-zlib
+ # keep DESTDIR set, to avoid a full rebuild in package()
+ make DESTDIR="$pkgdir"
+}
+
+check() {
+ cd "$builddir"
+ export PATH="$srcdir/python/bin:$PATH"
+ make DESTDIR="$pkgdir" test-only
+}
+
+package() {
+ cd "$builddir"
+ export PATH="$srcdir/python/bin:$PATH"
+ make DESTDIR="$pkgdir" install
+}
+
+sha512sums="cf741f733af7a7e1fbd37b0f98110078494b4771dbdfccacfda95a5ea4cda6cdcea4f8d31dddcf27477213614e4ab6cf7d1a1f900cb92936333730737ac4f9e8 node-v10.15.3.tar.xz
+27ea43eb45fc68f3d2469d5f07636e10801dee11635a430ec8ec922ed790bb426b072da94df885e4dfa1ea8b7a24f2f56dd92f9b0f51e162330f161216bd6de6 Python-2.7.15.tar.xz
+8f64922d586bce9d82c83042a989739cc55ecc5e015778cdfbda21c257aa50527ddb18740985bcb2068e4a749b71eb8a135d9a8152b374d361589df7f33c9b60 libatomic.patch
+d369cd9685e372368af11ea763defdde7afc789ce5e2f617b47174fb4d45003d6e494a00ef92c9ed098c49c189d1690edf9ce780448a5b4a5b072c20ea35ab95 ppc32.patch
+583326353de5b0ac14a6c42321f6b031bd943a80550624794e15bd7526470f67bfa14a66558db3c94b4ee2db3053d2e4efed2117f4e7b6dca3c59c171048c094 ppc64.patch
+3ea09e36ed0cc31e0475ebc9c92b7609b70e9c1637c5db6c92cf1d6363fb8c6f884ffa20dd81054ca390b721695185327d80c9eeff0688a959e9d46947602471 stack-silliness.patch"
diff --git a/user/node/gyp-python3.patch b/user/node/gyp-python3.patch
new file mode 100644
index 000000000..e52ef19ec
--- /dev/null
+++ b/user/node/gyp-python3.patch
@@ -0,0 +1,3862 @@
+diff --git a/tools/gyp/pylib/gyp/input.py b/pylib/gyp/input.py
+index a046a15..21b4606 100644
+--- a/tools/gyp/pylib/gyp/input.py
++++ b/tools/gyp/pylib/gyp/input.py
+@@ -2,14 +2,8 @@
+ # Use of this source code is governed by a BSD-style license that can be
+ # found in the LICENSE file.
+
+-from compiler.ast import Const
+-from compiler.ast import Dict
+-from compiler.ast import Discard
+-from compiler.ast import List
+-from compiler.ast import Module
+-from compiler.ast import Node
+-from compiler.ast import Stmt
+-import compiler
++import ast
++
+ import gyp.common
+ import gyp.simple_copy
+ import multiprocessing
+@@ -183,43 +177,38 @@
+ Note that this is slower than eval() is.
+ """
+
+- ast = compiler.parse(file_contents)
+- assert isinstance(ast, Module)
+- c1 = ast.getChildren()
+- assert c1[0] is None
+- assert isinstance(c1[1], Stmt)
+- c2 = c1[1].getChildren()
+- assert isinstance(c2[0], Discard)
+- c3 = c2[0].getChildren()
+- assert len(c3) == 1
+- return CheckNode(c3[0], [])
++ syntax_tree = ast.parse(file_contents)
++ assert isinstance(syntax_tree, ast.Module)
++ c1 = syntax_tree.body
++ assert len(c1) == 1
++ c2 = c1[0]
++ assert isinstance(c2, ast.Expr)
++ return CheckNode(c2.value, [])
+
+
+ def CheckNode(node, keypath):
+- if isinstance(node, Dict):
+- c = node.getChildren()
++ if isinstance(node, ast.Dict):
+ dict = {}
+- for n in range(0, len(c), 2):
+- assert isinstance(c[n], Const)
+- key = c[n].getChildren()[0]
++ for key, value in zip(node.keys, node.values):
++ assert isinstance(key, ast.Str)
++ key = key.s
+ if key in dict:
+ raise GypError("Key '" + key + "' repeated at level " +
+ repr(len(keypath) + 1) + " with key path '" +
+ '.'.join(keypath) + "'")
+ kp = list(keypath) # Make a copy of the list for descending this node.
+ kp.append(key)
+- dict[key] = CheckNode(c[n + 1], kp)
++ dict[key] = CheckNode(value, kp)
+ return dict
+- elif isinstance(node, List):
+- c = node.getChildren()
++ elif isinstance(node, ast.List):
+ children = []
+- for index, child in enumerate(c):
++ for index, child in enumerate(node.elts):
+ kp = list(keypath) # Copy list.
+ kp.append(repr(index))
+ children.append(CheckNode(child, kp))
+ return children
+- elif isinstance(node, Const):
+- return node.getChildren()[0]
++ elif isinstance(node, ast.Str):
++ return node.s
+ else:
+ raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) +
+ "': " + repr(node))
+diff --git a/tools/gyp/pylib/gyp/simple_copy.py b/pylib/gyp/simple_copy.py
+index 74c98c5..eaf5f8b 100644
+--- a/tools/gyp/pylib/gyp/simple_copy.py
++++ b/tools/gyp/pylib/gyp/simple_copy.py
+@@ -28,8 +28,19 @@
+ def _deepcopy_atomic(x):
+ return x
+
+-for x in (type(None), int, long, float,
+- bool, str, unicode, type):
++try:
++ _string_types = (str, unicode)
++# There's no unicode in python3
++except NameError:
++ _string_types = (str, )
++
++try:
++ _integer_types = (int, long)
++# There's no long in python3
++except NameError:
++ _integer_types = (int, )
++
++for x in (type(None), float, bool, type) + _integer_types + _string_types:
+ d[x] = _deepcopy_atomic
+
+ def _deepcopy_list(x):
+diff --git a/tools/gyp/PRESUBMIT.py b/PRESUBMIT.py
+index 4bc1b8c..5ee669b 100644
+--- a/tools/gyp/PRESUBMIT.py
++++ b/tools/gyp/PRESUBMIT.py
+@@ -76,8 +76,7 @@
+ def _LicenseHeader(input_api):
+ # Accept any year number from 2009 to the current year.
+ current_year = int(input_api.time.strftime('%Y'))
+- allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1)))
+-
++ allowed_years = (str(s) for s in reversed(range(2009, current_year + 1)))
+ years_re = '(' + '|'.join(allowed_years) + ')'
+
+ # The (c) is deprecated, but tolerate it until it's removed from all files.
+diff --git a/tools/gyp/README.md b/README.md
+index c0d73ac..b4766c9 100644
+--- a/tools/gyp/README.md
++++ b/tools/gyp/README.md
+@@ -1,4 +1,5 @@
+ GYP can Generate Your Projects.
+ ===================================
+
+-Documents are available at [gyp.gsrc.io](https://gyp.gsrc.io), or you can check out ```md-pages``` branch to read those documents offline.
++Documents are available at [gyp.gsrc.io](https://gyp.gsrc.io), or you can
++check out ```md-pages``` branch to read those documents offline.
+diff --git a/tools/gyp/buildbot/buildbot_run.py b/buildbot/buildbot_run.py
+index 9a2b71f..8941652 100755
+--- a/tools/gyp/buildbot/buildbot_run.py
++++ b/tools/gyp/buildbot/buildbot_run.py
+@@ -5,6 +5,8 @@
+
+ """Argument-less script to select what to run on the buildbots."""
+
++from __future__ import print_function
++
+ import os
+ import shutil
+ import subprocess
+@@ -24,14 +26,14 @@
+ with open(os.devnull) as devnull_fd:
+ retcode = subprocess.call(stdin=devnull_fd, *args, **kwargs)
+ if retcode != 0:
+- print '@@@STEP_EXCEPTION@@@'
++ print('@@@STEP_EXCEPTION@@@')
+ sys.exit(1)
+
+
+ def PrepareCmake():
+ """Build CMake 2.8.8 since the version in Precise is 2.8.7."""
+ if os.environ['BUILDBOT_CLOBBER'] == '1':
+- print '@@@BUILD_STEP Clobber CMake checkout@@@'
++ print('@@@BUILD_STEP Clobber CMake checkout@@@')
+ shutil.rmtree(CMAKE_DIR)
+
+ # We always build CMake 2.8.8, so no need to do anything
+@@ -39,10 +41,10 @@
+ if os.path.isdir(CMAKE_DIR):
+ return
+
+- print '@@@BUILD_STEP Initialize CMake checkout@@@'
++ print('@@@BUILD_STEP Initialize CMake checkout@@@')
+ os.mkdir(CMAKE_DIR)
+
+- print '@@@BUILD_STEP Sync CMake@@@'
++ print('@@@BUILD_STEP Sync CMake@@@')
+ CallSubProcess(
+ ['git', 'clone',
+ '--depth', '1',
+@@ -53,7 +55,7 @@
+ CMAKE_DIR],
+ cwd=CMAKE_DIR)
+
+- print '@@@BUILD_STEP Build CMake@@@'
++ print('@@@BUILD_STEP Build CMake@@@')
+ CallSubProcess(
+ ['/bin/bash', 'bootstrap', '--prefix=%s' % CMAKE_DIR],
+ cwd=CMAKE_DIR)
+@@ -74,7 +76,7 @@
+ if not format:
+ format = title
+
+- print '@@@BUILD_STEP ' + title + '@@@'
++ print('@@@BUILD_STEP ' + title + '@@@')
+ sys.stdout.flush()
+ env = os.environ.copy()
+ if msvs_version:
+@@ -89,17 +91,17 @@
+ retcode = subprocess.call(command, cwd=ROOT_DIR, env=env, shell=True)
+ if retcode:
+ # Emit failure tag, and keep going.
+- print '@@@STEP_FAILURE@@@'
++ print('@@@STEP_FAILURE@@@')
+ return 1
+ return 0
+
+
+ def GypBuild():
+ # Dump out/ directory.
+- print '@@@BUILD_STEP cleanup@@@'
+- print 'Removing %s...' % OUT_DIR
++ print('@@@BUILD_STEP cleanup@@@')
++ print('Removing %s...' % OUT_DIR)
+ shutil.rmtree(OUT_DIR, ignore_errors=True)
+- print 'Done.'
++ print('Done.')
+
+ retcode = 0
+ if sys.platform.startswith('linux'):
+@@ -128,7 +130,7 @@
+ # after the build proper that could be used for cumulative failures),
+ # use that instead of this. This isolates the final return value so
+ # that it isn't misattributed to the last stage.
+- print '@@@BUILD_STEP failures@@@'
++ print('@@@BUILD_STEP failures@@@')
+ sys.exit(retcode)
+
+
+diff --git a/tools/gyp/pylib/gyp/MSVSSettings.py b/pylib/gyp/MSVSSettings.py
+index 8ae1918..1d2e25a 100644
+--- a/tools/gyp/pylib/gyp/MSVSSettings.py
++++ b/tools/gyp/pylib/gyp/MSVSSettings.py
+@@ -14,9 +14,17 @@
+ MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild
+ """
+
++from __future__ import print_function
++
+ import sys
+ import re
+
++try:
++ # basestring was removed in python3.
++ basestring
++except NameError:
++ basestring = str
++
+ # Dictionaries of settings validators. The key is the tool name, the value is
+ # a dictionary mapping setting names to validation functions.
+ _msvs_validators = {}
+@@ -400,7 +408,7 @@
+
+ if unrecognized:
+ # We don't know this setting. Give a warning.
+- print >> stderr, error_msg
++ print(error_msg, file=stderr)
+
+
+ def FixVCMacroSlashes(s):
+@@ -433,7 +441,7 @@
+ '$(PlatformName)': '$(Platform)',
+ '$(SafeInputName)': '%(Filename)',
+ }
+- for old, new in replace_map.iteritems():
++ for old, new in replace_map.items():
+ s = s.replace(old, new)
+ s = FixVCMacroSlashes(s)
+ return s
+@@ -453,17 +461,18 @@
+ dictionaries of settings and their values.
+ """
+ msbuild_settings = {}
+- for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems():
++ for msvs_tool_name, msvs_tool_settings in msvs_settings.items():
+ if msvs_tool_name in _msvs_to_msbuild_converters:
+ msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name]
+- for msvs_setting, msvs_value in msvs_tool_settings.iteritems():
++ for msvs_setting, msvs_value in msvs_tool_settings.items():
+ if msvs_setting in msvs_tool:
+ # Invoke the translation function.
+ try:
+ msvs_tool[msvs_setting](msvs_value, msbuild_settings)
+- except ValueError, e:
+- print >> stderr, ('Warning: while converting %s/%s to MSBuild, '
+- '%s' % (msvs_tool_name, msvs_setting, e))
++ except ValueError as e:
++ print(('Warning: while converting %s/%s to MSBuild, '
++ '%s' % (msvs_tool_name, msvs_setting, e)),
++ file=stderr)
+ else:
+ _ValidateExclusionSetting(msvs_setting,
+ msvs_tool,
+@@ -472,8 +481,8 @@
+ (msvs_tool_name, msvs_setting)),
+ stderr)
+ else:
+- print >> stderr, ('Warning: unrecognized tool %s while converting to '
+- 'MSBuild.' % msvs_tool_name)
++ print(('Warning: unrecognized tool %s while converting to '
++ 'MSBuild.' % msvs_tool_name), file=stderr)
+ return msbuild_settings
+
+
+@@ -513,13 +522,13 @@
+ for tool_name in settings:
+ if tool_name in validators:
+ tool_validators = validators[tool_name]
+- for setting, value in settings[tool_name].iteritems():
++ for setting, value in settings[tool_name].items():
+ if setting in tool_validators:
+ try:
+ tool_validators[setting](value)
+- except ValueError, e:
+- print >> stderr, ('Warning: for %s/%s, %s' %
+- (tool_name, setting, e))
++ except ValueError as e:
++ print(('Warning: for %s/%s, %s' %
++ (tool_name, setting, e)), file=stderr)
+ else:
+ _ValidateExclusionSetting(setting,
+ tool_validators,
+@@ -528,7 +537,7 @@
+ stderr)
+
+ else:
+- print >> stderr, ('Warning: unrecognized tool %s' % tool_name)
++ print(('Warning: unrecognized tool %s' % tool_name), file=stderr)
+
+
+ # MSVS and MBuild names of the tools.
+diff --git a/tools/gyp/pylib/gyp/MSVSSettings_test.py b/pylib/gyp/MSVSSettings_test.py
+index bf6ea6b..73ed25e 100755
+--- a/tools/gyp/pylib/gyp/MSVSSettings_test.py
++++ b/tools/gyp/pylib/gyp/MSVSSettings_test.py
+@@ -6,7 +6,10 @@
+
+ """Unit tests for the MSVSSettings.py file."""
+
+-import StringIO
++try:
++ from StringIO import StringIO
++except ImportError:
++ from io import StringIO
+ import unittest
+ import gyp.MSVSSettings as MSVSSettings
+
+@@ -14,7 +17,7 @@
+ class TestSequenceFunctions(unittest.TestCase):
+
+ def setUp(self):
+- self.stderr = StringIO.StringIO()
++ self.stderr = StringIO()
+
+ def _ExpectedWarnings(self, expected):
+ """Compares recorded lines to expected warnings."""
+diff --git a/tools/gyp/pylib/gyp/MSVSUserFile.py b/pylib/gyp/MSVSUserFile.py
+index 6c07e9a..2264d64 100644
+--- a/tools/gyp/pylib/gyp/MSVSUserFile.py
++++ b/tools/gyp/pylib/gyp/MSVSUserFile.py
+@@ -91,7 +91,7 @@
+
+ if environment and isinstance(environment, dict):
+ env_list = ['%s="%s"' % (key, val)
+- for (key,val) in environment.iteritems()]
++ for (key,val) in environment.items()]
+ environment = ' '.join(env_list)
+ else:
+ environment = ''
+@@ -135,7 +135,7 @@
+ def WriteIfChanged(self):
+ """Writes the user file."""
+ configs = ['Configurations']
+- for config, spec in sorted(self.configurations.iteritems()):
++ for config, spec in sorted(self.configurations.items()):
+ configs.append(spec)
+
+ content = ['VisualStudioUserFile',
+diff --git a/tools/gyp/pylib/gyp/MSVSUtil.py b/pylib/gyp/MSVSUtil.py
+index 96dea6c..f24530b 100644
+--- a/tools/gyp/pylib/gyp/MSVSUtil.py
++++ b/tools/gyp/pylib/gyp/MSVSUtil.py
+@@ -236,7 +236,7 @@
+
+ # Set up the shim to output its PDB to the same location as the final linker
+ # target.
+- for config_name, config in shim_dict.get('configurations').iteritems():
++ for config_name, config in shim_dict.get('configurations').items():
+ pdb_path = _GetPdbPath(target_dict, config_name, vars)
+
+ # A few keys that we don't want to propagate.
+diff --git a/tools/gyp/pylib/gyp/MSVSVersion.py b/pylib/gyp/MSVSVersion.py
+index 44b958d..5f316b6 100644
+--- a/tools/gyp/pylib/gyp/MSVSVersion.py
++++ b/tools/gyp/pylib/gyp/MSVSVersion.py
+@@ -189,7 +189,7 @@
+ text = None
+ try:
+ text = _RegistryQueryBase('Sysnative', key, value)
+- except OSError, e:
++ except OSError as e:
+ if e.errno == errno.ENOENT:
+ text = _RegistryQueryBase('System32', key, value)
+ else:
+@@ -207,12 +207,15 @@
+ contents of the registry key's value, or None on failure. Throws
+ ImportError if _winreg is unavailable.
+ """
+- import _winreg
++ try:
++ import _winreg as winreg
++ except ImportError:
++ import winreg
+ try:
+ root, subkey = key.split('\\', 1)
+ assert root == 'HKLM' # Only need HKLM for now.
+- with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
+- return _winreg.QueryValueEx(hkey, value)[0]
++ with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
++ return winreg.QueryValueEx(hkey, value)[0]
+ except WindowsError:
+ return None
+
+diff --git a/tools/gyp/pylib/gyp/__init__.py b/pylib/gyp/__init__.py
+index 668f38b..e038151 100755
+--- a/tools/gyp/pylib/gyp/__init__.py
++++ b/tools/gyp/pylib/gyp/__init__.py
+@@ -4,6 +4,8 @@
+ # Use of this source code is governed by a BSD-style license that can be
+ # found in the LICENSE file.
+
++from __future__ import print_function
++
+ import copy
+ import gyp.input
+ import optparse
+@@ -14,6 +16,12 @@
+ import traceback
+ from gyp.common import GypError
+
++try:
++ # basestring was removed in python3.
++ basestring
++except NameError:
++ basestring = str
++
+ # Default debug modes for GYP
+ debug = {}
+
+@@ -22,7 +30,6 @@
+ DEBUG_VARIABLES = 'variables'
+ DEBUG_INCLUDES = 'includes'
+
+-
+ def DebugOutput(mode, message, *args):
+ if 'all' in gyp.debug or mode in gyp.debug:
+ ctx = ('unknown', 0, 'unknown')
+@@ -34,8 +41,8 @@
+ pass
+ if args:
+ message %= args
+- print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
+- ctx[1], ctx[2], message)
++ print('%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
++ ctx[1], ctx[2], message))
+
+ def FindBuildFiles():
+ extension = '.gyp'
+@@ -207,7 +214,7 @@
+ # We always want to ignore the environment when regenerating, to avoid
+ # duplicate or changed flags in the environment at the time of regeneration.
+ flags = ['--ignore-environment']
+- for name, metadata in options._regeneration_metadata.iteritems():
++ for name, metadata in options._regeneration_metadata.items():
+ opt = metadata['opt']
+ value = getattr(options, name)
+ value_predicate = metadata['type'] == 'path' and FixPath or Noop
+@@ -226,12 +233,13 @@
+ (action == 'store_false' and not value)):
+ flags.append(opt)
+ elif options.use_environment and env_name:
+- print >>sys.stderr, ('Warning: environment regeneration unimplemented '
++ print(('Warning: environment regeneration unimplemented '
+ 'for %s flag %r env_name %r' % (action, opt,
+- env_name))
++ env_name)),
++ file=sys.stderr)
+ else:
+- print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
+- 'flag %r' % (action, opt))
++ print(('Warning: regeneration unimplemented for action %r '
++ 'flag %r' % (action, opt)), file=sys.stderr)
+
+ return flags
+
+@@ -431,12 +439,11 @@
+ for build_file in build_files:
+ build_file_dir = os.path.abspath(os.path.dirname(build_file))
+ build_file_dir_components = build_file_dir.split(os.path.sep)
+- components_len = len(build_file_dir_components)
+- for index in xrange(components_len - 1, -1, -1):
+- if build_file_dir_components[index] == 'src':
++ for component in reversed(build_file_dir_components):
++ if component == 'src':
+ options.depth = os.path.sep.join(build_file_dir_components)
+ break
+- del build_file_dir_components[index]
++ del build_file_dir_components[-1]
+
+ # If the inner loop found something, break without advancing to another
+ # build file.
+@@ -475,7 +482,7 @@
+ if home_dot_gyp != None:
+ default_include = os.path.join(home_dot_gyp, 'include.gypi')
+ if os.path.exists(default_include):
+- print 'Using overrides found in ' + default_include
++ print('Using overrides found in ' + default_include)
+ includes.append(default_include)
+
+ # Command-line --include files come after the default include.
+@@ -490,7 +497,7 @@
+ if options.generator_flags:
+ gen_flags += options.generator_flags
+ generator_flags = NameValueListToDict(gen_flags)
+- if DEBUG_GENERAL in gyp.debug.keys():
++ if DEBUG_GENERAL in gyp.debug:
+ DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)
+
+ # Generate all requested formats (use a set in case we got one format request
+@@ -523,7 +530,7 @@
+ generator.GenerateOutput(flat_list, targets, data, params)
+
+ if options.configs:
+- valid_configs = targets[flat_list[0]]['configurations'].keys()
++ valid_configs = targets[flat_list[0]]['configurations']
+ for conf in options.configs:
+ if conf not in valid_configs:
+ raise GypError('Invalid config specified via --build: %s' % conf)
+@@ -536,7 +543,7 @@
+ def main(args):
+ try:
+ return gyp_main(args)
+- except GypError, e:
++ except GypError as e:
+ sys.stderr.write("gyp: %s\n" % e)
+ return 1
+
+diff --git a/tools/gyp/pylib/gyp/common.py b/pylib/gyp/common.py
+index 1b245ec..1823de8 100644
+--- a/tools/gyp/pylib/gyp/common.py
++++ b/tools/gyp/pylib/gyp/common.py
+@@ -345,7 +345,7 @@
+ prefix=os.path.split(filename)[1] + '.gyp.',
+ dir=os.path.split(filename)[0])
+ try:
+- self.tmp_file = os.fdopen(tmp_fd, 'wb')
++ self.tmp_file = os.fdopen(tmp_fd, 'w')
+ except Exception:
+ # Don't leave turds behind.
+ os.unlink(self.tmp_path)
+@@ -363,7 +363,7 @@
+ same = False
+ try:
+ same = filecmp.cmp(self.tmp_path, filename, False)
+- except OSError, e:
++ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+
+@@ -382,9 +382,9 @@
+ #
+ # No way to get the umask without setting a new one? Set a safe one
+ # and then set it back to the old value.
+- umask = os.umask(077)
++ umask = os.umask(0o77)
+ os.umask(umask)
+- os.chmod(self.tmp_path, 0666 & ~umask)
++ os.chmod(self.tmp_path, 0o666 & ~umask)
+ if sys.platform == 'win32' and os.path.exists(filename):
+ # NOTE: on windows (but not cygwin) rename will not replace an
+ # existing file, so it must be preceded with a remove. Sadly there
+@@ -471,7 +471,7 @@
+ ''.join([source[0], header] + source[1:]))
+
+ # Make file executable.
+- os.chmod(tool_path, 0755)
++ os.chmod(tool_path, 0o755)
+
+
+ # From Alex Martelli,
+diff --git a/tools/gyp/pylib/gyp/common_test.py b/pylib/gyp/common_test.py
+index ad6f9a1..0b8ada3 100755
+--- a/tools/gyp/pylib/gyp/common_test.py
++++ b/tools/gyp/pylib/gyp/common_test.py
+@@ -63,6 +63,7 @@
+ self.assertFlavor('solaris', 'sunos' , {});
+ self.assertFlavor('linux' , 'linux2' , {});
+ self.assertFlavor('linux' , 'linux3' , {});
++ self.assertFlavor('linux' , 'linux' , {});
+
+ def test_param(self):
+ self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'})
+diff --git a/tools/gyp/pylib/gyp/easy_xml.py b/pylib/gyp/easy_xml.py
+index 2522efb..15c6651 100644
+--- a/tools/gyp/pylib/gyp/easy_xml.py
++++ b/tools/gyp/pylib/gyp/easy_xml.py
+@@ -6,6 +6,11 @@
+ import os
+ import locale
+
++try:
++ # reduce moved to functools in python3.
++ reduce
++except NameError:
++ from functools import reduce
+
+ def XmlToString(content, encoding='utf-8', pretty=False):
+ """ Writes the XML content to disk, touching the file only if it has changed.
+@@ -80,7 +85,7 @@
+ # Optionally in second position is a dictionary of the attributes.
+ rest = specification[1:]
+ if rest and isinstance(rest[0], dict):
+- for at, val in sorted(rest[0].iteritems()):
++ for at, val in sorted(rest[0].items()):
+ xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True)))
+ rest = rest[1:]
+ if rest:
+diff --git a/tools/gyp/pylib/gyp/easy_xml_test.py b/pylib/gyp/easy_xml_test.py
+index df64354..a1fdb18 100755
+--- a/tools/gyp/pylib/gyp/easy_xml_test.py
++++ b/tools/gyp/pylib/gyp/easy_xml_test.py
+@@ -8,13 +8,16 @@
+
+ import gyp.easy_xml as easy_xml
+ import unittest
+-import StringIO
++try:
++ from StringIO import StringIO
++except ImportError:
++ from io import StringIO
+
+
+ class TestSequenceFunctions(unittest.TestCase):
+
+ def setUp(self):
+- self.stderr = StringIO.StringIO()
++ self.stderr = StringIO()
+
+ def test_EasyXml_simple(self):
+ self.assertEqual(
+diff --git a/tools/gyp/pylib/gyp/flock_tool.py b/pylib/gyp/flock_tool.py
+index b38d866..81fb79d 100755
+--- a/tools/gyp/pylib/gyp/flock_tool.py
++++ b/tools/gyp/pylib/gyp/flock_tool.py
+@@ -39,7 +39,7 @@
+ # where fcntl.flock(fd, LOCK_EX) always fails
+ # with EBADF, that's why we use this F_SETLK
+ # hack instead.
+- fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666)
++ fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
+ if sys.platform.startswith('aix'):
+ # Python on AIX is compiled with LARGEFILE support, which changes the
+ # struct size.
+diff --git a/tools/gyp/pylib/gyp/generator/analyzer.py b/pylib/gyp/generator/analyzer.py
+index 921c1a6..b3484dc 100644
+--- a/tools/gyp/pylib/gyp/generator/analyzer.py
++++ b/tools/gyp/pylib/gyp/generator/analyzer.py
+@@ -62,6 +62,8 @@
+ then the "all" target includes "b1" and "b2".
+ """
+
++from __future__ import print_function
++
+ import gyp.common
+ import gyp.ninja_syntax as ninja_syntax
+ import json
+@@ -155,7 +157,7 @@
+ continue
+ result.append(base_path + source)
+ if debug:
+- print 'AddSource', org_source, result[len(result) - 1]
++ print('AddSource', org_source, result[len(result) - 1])
+
+
+ def _ExtractSourcesFromAction(action, base_path, base_path_components,
+@@ -185,7 +187,7 @@
+ base_path += '/'
+
+ if debug:
+- print 'ExtractSources', target, base_path
++ print('ExtractSources', target, base_path)
+
+ results = []
+ if 'sources' in target_dict:
+@@ -278,7 +280,7 @@
+ the root of the source tree."""
+ if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
+ if debug:
+- print 'gyp file modified', build_file
++ print('gyp file modified', build_file)
+ return True
+
+ # First element of included_files is the file itself.
+@@ -291,8 +293,8 @@
+ _ToGypPath(gyp.common.UnrelativePath(include_file, build_file))
+ if _ToLocalPath(toplevel_dir, rel_include_file) in files:
+ if debug:
+- print 'included gyp file modified, gyp_file=', build_file, \
+- 'included file=', rel_include_file
++ print('included gyp file modified, gyp_file=', build_file, \
++ 'included file=', rel_include_file)
+ return True
+ return False
+
+@@ -373,7 +375,7 @@
+ # If a build file (or any of its included files) is modified we assume all
+ # targets in the file are modified.
+ if build_file_in_files[build_file]:
+- print 'matching target from modified build file', target_name
++ print('matching target from modified build file', target_name)
+ target.match_status = MATCH_STATUS_MATCHES
+ matching_targets.append(target)
+ else:
+@@ -381,7 +383,7 @@
+ toplevel_dir)
+ for source in sources:
+ if _ToGypPath(os.path.normpath(source)) in files:
+- print 'target', target_name, 'matches', source
++ print('target', target_name, 'matches', source)
+ target.match_status = MATCH_STATUS_MATCHES
+ matching_targets.append(target)
+ break
+@@ -433,7 +435,7 @@
+ for dep in target.deps:
+ if _DoesTargetDependOnMatchingTargets(dep):
+ target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
+- print '\t', target.name, 'matches by dep', dep.name
++ print('\t', target.name, 'matches by dep', dep.name)
+ return True
+ target.match_status = MATCH_STATUS_DOESNT_MATCH
+ return False
+@@ -445,7 +447,7 @@
+ supplied as input to analyzer.
+ possible_targets: targets to search from."""
+ found = []
+- print 'Targets that matched by dependency:'
++ print('Targets that matched by dependency:')
+ for target in possible_targets:
+ if _DoesTargetDependOnMatchingTargets(target):
+ found.append(target)
+@@ -484,12 +486,13 @@
+ (add_if_no_ancestor or target.requires_build)) or
+ (target.is_static_library and add_if_no_ancestor and
+ not target.is_or_has_linked_ancestor)):
+- print '\t\tadding to compile targets', target.name, 'executable', \
+- target.is_executable, 'added_to_compile_targets', \
+- target.added_to_compile_targets, 'add_if_no_ancestor', \
+- add_if_no_ancestor, 'requires_build', target.requires_build, \
+- 'is_static_library', target.is_static_library, \
++ print('\t\tadding to compile targets', target.name, 'executable',
++ target.is_executable, 'added_to_compile_targets',
++ target.added_to_compile_targets, 'add_if_no_ancestor',
++ add_if_no_ancestor, 'requires_build', target.requires_build,
++ 'is_static_library', target.is_static_library,
+ 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor
++ )
+ result.add(target)
+ target.added_to_compile_targets = True
+
+@@ -500,7 +503,7 @@
+ supplied_targets: set of targets supplied to analyzer to search from."""
+ result = set()
+ for target in matching_targets:
+- print 'finding compile targets for match', target.name
++ print('finding compile targets for match', target.name)
+ _AddCompileTargets(target, supplied_targets, True, result)
+ return result
+
+@@ -508,46 +511,46 @@
+ def _WriteOutput(params, **values):
+ """Writes the output, either to stdout or a file is specified."""
+ if 'error' in values:
+- print 'Error:', values['error']
++ print('Error:', values['error'])
+ if 'status' in values:
+- print values['status']
++ print(values['status'])
+ if 'targets' in values:
+ values['targets'].sort()
+- print 'Supplied targets that depend on changed files:'
++ print('Supplied targets that depend on changed files:')
+ for target in values['targets']:
+- print '\t', target
++ print('\t', target)
+ if 'invalid_targets' in values:
+ values['invalid_targets'].sort()
+- print 'The following targets were not found:'
++ print('The following targets were not found:')
+ for target in values['invalid_targets']:
+- print '\t', target
++ print('\t', target)
+ if 'build_targets' in values:
+ values['build_targets'].sort()
+- print 'Targets that require a build:'
++ print('Targets that require a build:')
+ for target in values['build_targets']:
+- print '\t', target
++ print('\t', target)
+ if 'compile_targets' in values:
+ values['compile_targets'].sort()
+- print 'Targets that need to be built:'
++ print('Targets that need to be built:')
+ for target in values['compile_targets']:
+- print '\t', target
++ print('\t', target)
+ if 'test_targets' in values:
+ values['test_targets'].sort()
+- print 'Test targets:'
++ print('Test targets:')
+ for target in values['test_targets']:
+- print '\t', target
++ print('\t', target)
+
+ output_path = params.get('generator_flags', {}).get(
+ 'analyzer_output_path', None)
+ if not output_path:
+- print json.dumps(values)
++ print(json.dumps(values))
+ return
+ try:
+ f = open(output_path, 'w')
+ f.write(json.dumps(values) + '\n')
+ f.close()
+ except IOError as e:
+- print 'Error writing to output file', output_path, str(e)
++ print('Error writing to output file', output_path, str(e))
+
+
+ def _WasGypIncludeFileModified(params, files):
+@@ -556,7 +559,7 @@
+ if params['options'].includes:
+ for include in params['options'].includes:
+ if _ToGypPath(os.path.normpath(include)) in files:
+- print 'Include file modified, assuming all changed', include
++ print('Include file modified, assuming all changed', include)
+ return True
+ return False
+
+@@ -638,13 +641,13 @@
+ set(self._root_targets))]
+ else:
+ test_targets = [x for x in test_targets_no_all]
+- print 'supplied test_targets'
++ print('supplied test_targets')
+ for target_name in self._test_target_names:
+- print '\t', target_name
+- print 'found test_targets'
++ print('\t', target_name)
++ print('found test_targets')
+ for target in test_targets:
+- print '\t', target.name
+- print 'searching for matching test targets'
++ print('\t', target.name)
++ print('searching for matching test targets')
+ matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
+ matching_test_targets_contains_all = (test_target_names_contains_all and
+ set(matching_test_targets) &
+@@ -654,14 +657,14 @@
+ # 'all' is subsequentely added to the matching names below.
+ matching_test_targets = [x for x in (set(matching_test_targets) &
+ set(test_targets_no_all))]
+- print 'matched test_targets'
++ print('matched test_targets')
+ for target in matching_test_targets:
+- print '\t', target.name
++ print('\t', target.name)
+ matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1]
+ for target in matching_test_targets]
+ if matching_test_targets_contains_all:
+ matching_target_names.append('all')
+- print '\tall'
++ print('\tall')
+ return matching_target_names
+
+ def find_matching_compile_target_names(self):
+@@ -669,7 +672,7 @@
+ assert self.is_build_impacted();
+ # Compile targets are found by searching up from changed targets.
+ # Reset the visited status for _GetBuildTargets.
+- for target in self._name_to_target.itervalues():
++ for target in self._name_to_target.values():
+ target.visited = False
+
+ supplied_targets = _LookupTargets(self._supplied_target_names_no_all(),
+@@ -677,10 +680,10 @@
+ if 'all' in self._supplied_target_names():
+ supplied_targets = [x for x in (set(supplied_targets) |
+ set(self._root_targets))]
+- print 'Supplied test_targets & compile_targets'
++ print('Supplied test_targets & compile_targets')
+ for target in supplied_targets:
+- print '\t', target.name
+- print 'Finding compile targets'
++ print('\t', target.name)
++ print('Finding compile targets')
+ compile_targets = _GetCompileTargets(self._changed_targets,
+ supplied_targets)
+ return [gyp.common.ParseQualifiedTarget(target.name)[1]
+@@ -699,7 +702,7 @@
+
+ toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir))
+ if debug:
+- print 'toplevel_dir', toplevel_dir
++ print('toplevel_dir', toplevel_dir)
+
+ if _WasGypIncludeFileModified(params, config.files):
+ result_dict = { 'status': all_changed_string,
+diff --git a/tools/gyp/pylib/gyp/generator/cmake.py b/pylib/gyp/generator/cmake.py
+index a2b9629..4a2041c 100644
+--- a/tools/gyp/pylib/gyp/generator/cmake.py
++++ b/tools/gyp/pylib/gyp/generator/cmake.py
+@@ -28,6 +28,8 @@
+ CMakeLists.txt file.
+ """
+
++from __future__ import print_function
++
+ import multiprocessing
+ import os
+ import signal
+@@ -36,6 +38,12 @@
+ import gyp.common
+ import gyp.xcode_emulation
+
++try:
++ # maketrans moved to str in python3.
++ _maketrans = string.maketrans
++except (NameError, AttributeError):
++ _maketrans = str.maketrans
++
+ generator_default_variables = {
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '',
+@@ -238,7 +246,7 @@
+ Invalid for make: ':'
+ Invalid for unknown reasons but cause failures: '.'
+ """
+- return a.translate(string.maketrans(' /():."', '_______'))
++ return a.translate(_maketrans(' /():."', '_______'))
+
+
+ def WriteActions(target_name, actions, extra_sources, extra_deps,
+@@ -644,8 +652,8 @@
+
+ cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
+ if cmake_target_type is None:
+- print ('Target %s has unknown target type %s, skipping.' %
+- ( target_name, target_type ) )
++ print('Target %s has unknown target type %s, skipping.' %
++ ( target_name, target_type ))
+ return
+
+ SetVariable(output, 'TARGET', target_name)
+@@ -868,8 +876,8 @@
+ default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX']
+
+ elif target_type != 'executable':
+- print ('ERROR: What output file should be generated?',
+- 'type', target_type, 'target', target_name)
++ print(('ERROR: What output file should be generated?',
++ 'type', target_type, 'target', target_name))
+
+ product_prefix = spec.get('product_prefix', default_product_prefix)
+ product_name = spec.get('product_name', default_product_name)
+@@ -1207,11 +1215,11 @@
+ output_dir,
+ config_name))
+ arguments = ['cmake', '-G', 'Ninja']
+- print 'Generating [%s]: %s' % (config_name, arguments)
++ print('Generating [%s]: %s' % (config_name, arguments))
+ subprocess.check_call(arguments, cwd=build_dir)
+
+ arguments = ['ninja', '-C', build_dir]
+- print 'Building [%s]: %s' % (config_name, arguments)
++ print('Building [%s]: %s' % (config_name, arguments))
+ subprocess.check_call(arguments)
+
+
+@@ -1230,7 +1238,7 @@
+ GenerateOutputForConfig(target_list, target_dicts, data,
+ params, user_config)
+ else:
+- config_names = target_dicts[target_list[0]]['configurations'].keys()
++ config_names = target_dicts[target_list[0]]['configurations']
+ if params['parallel']:
+ try:
+ pool = multiprocessing.Pool(len(config_names))
+@@ -1239,7 +1247,7 @@
+ arglists.append((target_list, target_dicts, data,
+ params, config_name))
+ pool.map(CallGenerateOutputForConfig, arglists)
+- except KeyboardInterrupt, e:
++ except KeyboardInterrupt as e:
+ pool.terminate()
+ raise e
+ else:
+diff --git a/tools/gyp/pylib/gyp/generator/dump_dependency_json.py b/pylib/gyp/generator/dump_dependency_json.py
+index 160eafe..2bf3f39 100644
+--- a/tools/gyp/pylib/gyp/generator/dump_dependency_json.py
++++ b/tools/gyp/pylib/gyp/generator/dump_dependency_json.py
+@@ -2,6 +2,8 @@
+ # Use of this source code is governed by a BSD-style license that can be
+ # found in the LICENSE file.
+
++from __future__ import print_function
++
+ import collections
+ import os
+ import gyp
+@@ -96,4 +98,4 @@
+ f = open(filename, 'w')
+ json.dump(edges, f)
+ f.close()
+- print 'Wrote json to %s.' % filename
++ print('Wrote json to %s.' % filename)
+diff --git a/tools/gyp/pylib/gyp/generator/eclipse.py b/pylib/gyp/generator/eclipse.py
+index 3544347..d039f03 100644
+--- a/tools/gyp/pylib/gyp/generator/eclipse.py
++++ b/tools/gyp/pylib/gyp/generator/eclipse.py
+@@ -141,7 +141,7 @@
+ compiler_includes_list.append(include_dir)
+
+ # Find standard gyp include dirs.
+- if config.has_key('include_dirs'):
++ if 'include_dirs' in config:
+ include_dirs = config['include_dirs']
+ for shared_intermediate_dir in shared_intermediate_dirs:
+ for include_dir in include_dirs:
+@@ -272,7 +272,7 @@
+ out.write(' <language name="holder for library settings"></language>\n')
+ for lang in eclipse_langs:
+ out.write(' <language name="%s">\n' % lang)
+- for key in sorted(defines.iterkeys()):
++ for key in sorted(defines.keys()):
+ out.write(' <macro><name>%s</name><value>%s</value></macro>\n' %
+ (escape(key), escape(defines[key])))
+ out.write(' </language>\n')
+@@ -418,7 +418,7 @@
+ GenerateOutputForConfig(target_list, target_dicts, data, params,
+ user_config)
+ else:
+- config_names = target_dicts[target_list[0]]['configurations'].keys()
++ config_names = target_dicts[target_list[0]]['configurations']
+ for config_name in config_names:
+ GenerateOutputForConfig(target_list, target_dicts, data, params,
+ config_name)
+diff --git a/tools/gyp/pylib/gyp/generator/gypd.py b/pylib/gyp/generator/gypd.py
+index 3efdb99..78eeaa6 100644
+--- a/tools/gyp/pylib/gyp/generator/gypd.py
++++ b/tools/gyp/pylib/gyp/generator/gypd.py
+@@ -88,7 +88,7 @@
+ if not output_file in output_files:
+ output_files[output_file] = input_file
+
+- for output_file, input_file in output_files.iteritems():
++ for output_file, input_file in output_files.items():
+ output = open(output_file, 'w')
+ pprint.pprint(data[input_file], output)
+ output.close()
+diff --git a/tools/gyp/pylib/gyp/generator/make.py b/pylib/gyp/generator/make.py
+index fb4f918..2057e3a 100644
+--- a/tools/gyp/pylib/gyp/generator/make.py
++++ b/tools/gyp/pylib/gyp/generator/make.py
+@@ -21,6 +21,8 @@
+ # toplevel Makefile. It may make sense to generate some .mk files on
+ # the side to keep the the files readable.
+
++from __future__ import print_function
++
+ import os
+ import re
+ import sys
+@@ -668,7 +670,7 @@
+ basenames.setdefault(basename, []).append(source)
+
+ error = ''
+- for basename, files in basenames.iteritems():
++ for basename, files in basenames.items():
+ if len(files) > 1:
+ error += ' %s: %s\n' % (basename, ' '.join(files))
+
+@@ -816,7 +818,7 @@
+ gyp.xcode_emulation.MacPrefixHeader(
+ self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)),
+ self.Pchify))
+- sources = filter(Compilable, all_sources)
++ sources = [x for x in all_sources if Compilable(x)]
+ if sources:
+ self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
+ extensions = set([os.path.splitext(s)[1] for s in sources])
+@@ -945,7 +947,7 @@
+ '%s%s'
+ % (name, cd_action, command))
+ self.WriteLn()
+- outputs = map(self.Absolutify, outputs)
++ outputs = [self.Absolutify(o) for o in outputs]
+ # The makefile rules are all relative to the top dir, but the gyp actions
+ # are defined relative to their containing dir. This replaces the obj
+ # variable for the action rule with an absolute version so that the output
+@@ -1035,7 +1037,7 @@
+ outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
+ inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
+
+- outputs = map(self.Absolutify, outputs)
++ outputs = [self.Absolutify(o) for o in outputs]
+ all_outputs += outputs
+ # Only write the 'obj' and 'builddir' rules for the "primary" output
+ # (:1); it's superfluous for the "extra outputs", and this avoids
+@@ -1233,11 +1235,11 @@
+ self.WriteList(cflags_objcc, 'CFLAGS_OBJCC_%s' % configname)
+ includes = config.get('include_dirs')
+ if includes:
+- includes = map(Sourceify, map(self.Absolutify, includes))
++ includes = [Sourceify(self.Absolutify(include)) for include in includes]
+ self.WriteList(includes, 'INCS_%s' % configname, prefix='-I')
+
+ compilable = filter(Compilable, sources)
+- objs = map(self.Objectify, map(self.Absolutify, map(Target, compilable)))
++ objs = [self.Objectify(self.Absolutify(Target(x))) for x in compilable]
+ self.WriteList(objs, 'OBJS')
+
+ for obj in objs:
+@@ -1309,7 +1311,7 @@
+
+ # If there are any object files in our input file list, link them into our
+ # output.
+- extra_link_deps += filter(Linkable, sources)
++ extra_link_deps += [source for source in sources if Linkable(source)]
+
+ self.WriteLn()
+
+@@ -1377,8 +1379,8 @@
+ elif self.type == 'none':
+ target = '%s.stamp' % target
+ elif self.type != 'executable':
+- print ("ERROR: What output file should be generated?",
+- "type", self.type, "target", target)
++ print(("ERROR: What output file should be generated?",
++ "type", self.type, "target", target))
+
+ target_prefix = spec.get('product_prefix', target_prefix)
+ target = spec.get('product_name', target)
+@@ -1542,9 +1544,9 @@
+ # Postbuilds expect to be run in the gyp file's directory, so insert an
+ # implicit postbuild to cd to there.
+ postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path]))
+- for i in xrange(len(postbuilds)):
+- if not postbuilds[i].startswith('$'):
+- postbuilds[i] = EscapeShellArgument(postbuilds[i])
++ for i, postbuild in enumerate(postbuilds):
++ if not postbuild.startswith('$'):
++ postbuilds[i] = EscapeShellArgument(postbuild)
+ self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output))
+ self.WriteLn('%s: POSTBUILDS := %s' % (
+ QuoteSpaces(self.output), ' '.join(postbuilds)))
+@@ -1634,7 +1636,7 @@
+ self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all,
+ postbuilds=postbuilds)
+ else:
+- print "WARNING: no output for", self.type, target
++ print("WARNING: no output for", self.type, target)
+
+ # Add an alias for each target (if there are any outputs).
+ # Installable target aliases are created below.
+@@ -1741,7 +1743,7 @@
+ output is just a name to run the rule
+ command: (optional) command name to generate unambiguous labels
+ """
+- outputs = map(QuoteSpaces, outputs)
++ outputs = [QuoteSpaces(o) for o in outputs]
+ inputs = map(QuoteSpaces, inputs)
+
+ if comment:
+@@ -1986,7 +1988,7 @@
+ if options.toplevel_dir and options.toplevel_dir != '.':
+ arguments += '-C', options.toplevel_dir
+ arguments.append('BUILDTYPE=' + config)
+- print 'Building [%s]: %s' % (config, arguments)
++ print('Building [%s]: %s' % (config, arguments))
+ subprocess.check_call(arguments)
+
+
+diff --git a/tools/gyp/pylib/gyp/generator/msvs.py b/pylib/gyp/generator/msvs.py
+index 8fe9e5a..e8a2b36 100644
+--- a/tools/gyp/pylib/gyp/generator/msvs.py
++++ b/tools/gyp/pylib/gyp/generator/msvs.py
+@@ -2,6 +2,9 @@
+ # Use of this source code is governed by a BSD-style license that can be
+ # found in the LICENSE file.
+
++from __future__ import print_function
++
++import collections
+ import copy
+ import ntpath
+ import os
+@@ -23,16 +26,6 @@
+ from gyp.common import GypError
+ from gyp.common import OrderedSet
+
+-# TODO: Remove once bots are on 2.7, http://crbug.com/241769
+-def _import_OrderedDict():
+- import collections
+- try:
+- return collections.OrderedDict
+- except AttributeError:
+- import gyp.ordered_dict
+- return gyp.ordered_dict.OrderedDict
+-OrderedDict = _import_OrderedDict()
+-
+
+ # Regular expression for validating Visual Studio GUIDs. If the GUID
+ # contains lowercase hex letters, MSVS will be fine. However,
+@@ -202,7 +195,7 @@
+ if not prefix: prefix = []
+ result = []
+ excluded_result = []
+- folders = OrderedDict()
++ folders = collections.OrderedDict()
+ # Gather files into the final result, excluded, or folders.
+ for s in sources:
+ if len(s) == 1:
+@@ -469,7 +462,7 @@
+ 'CommandLine': cmd,
+ })
+ # Add to the properties of primary input for each config.
+- for config_name, c_data in spec['configurations'].iteritems():
++ for config_name, c_data in spec['configurations'].items():
+ p.AddFileConfig(_FixPath(primary_input),
+ _ConfigFullName(config_name, c_data), tools=[tool])
+
+@@ -775,8 +768,8 @@
+ # the VCProj but cause the same problem on the final command-line. Moving
+ # the item to the end of the list does works, but that's only possible if
+ # there's only one such item. Let's just warn the user.
+- print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' +
+- 'quotes in ' + s)
++ print(('Warning: MSVS may misinterpret the odd number of ' +
++ 'quotes in ' + s), file=sys.stderr)
+ return s
+
+
+@@ -991,7 +984,7 @@
+ basenames.setdefault(basename, []).append(source)
+
+ error = ''
+- for basename, files in basenames.iteritems():
++ for basename, files in basenames.items():
+ if len(files) > 1:
+ error += ' %s: %s\n' % (basename, ' '.join(files))
+
+@@ -1023,7 +1016,7 @@
+ relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
+
+ config_type = _GetMSVSConfigurationType(spec, project.build_file)
+- for config_name, config in spec['configurations'].iteritems():
++ for config_name, config in spec['configurations'].items():
+ _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
+
+ # MSVC08 and prior version cannot handle duplicate basenames in the same
+@@ -1392,10 +1385,10 @@
+ A list of Tool objects.
+ """
+ tool_list = []
+- for tool, settings in tools.iteritems():
++ for tool, settings in tools.items():
+ # Collapse settings with lists.
+ settings_fixed = {}
+- for setting, value in settings.iteritems():
++ for setting, value in settings.items():
+ if type(value) == list:
+ if ((tool == 'VCLinkerTool' and
+ setting == 'AdditionalDependencies') or
+@@ -1570,7 +1563,7 @@
+ def _GetPrecompileRelatedFiles(spec):
+ # Gather a list of precompiled header related sources.
+ precompiled_related = []
+- for _, config in spec['configurations'].iteritems():
++ for _, config in spec['configurations'].items():
+ for k in precomp_keys:
+ f = config.get(k)
+ if f:
+@@ -1581,7 +1574,7 @@
+ def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
+ list_excluded):
+ exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
+- for file_name, excluded_configs in exclusions.iteritems():
++ for file_name, excluded_configs in exclusions.items():
+ if (not list_excluded and
+ len(excluded_configs) == len(spec['configurations'])):
+ # If we're not listing excluded files, then they won't appear in the
+@@ -1598,7 +1591,7 @@
+ # Exclude excluded sources from being built.
+ for f in excluded_sources:
+ excluded_configs = []
+- for config_name, config in spec['configurations'].iteritems():
++ for config_name, config in spec['configurations'].items():
+ precomped = [_FixPath(config.get(i, '')) for i in precomp_keys]
+ # Don't do this for ones that are precompiled header related.
+ if f not in precomped:
+@@ -1608,7 +1601,7 @@
+ # Exclude them now.
+ for f in excluded_idl:
+ excluded_configs = []
+- for config_name, config in spec['configurations'].iteritems():
++ for config_name, config in spec['configurations'].items():
+ excluded_configs.append((config_name, config))
+ exclusions[f] = excluded_configs
+ return exclusions
+@@ -1617,7 +1610,7 @@
+ def _AddToolFilesToMSVS(p, spec):
+ # Add in tool files (rules).
+ tool_files = OrderedSet()
+- for _, config in spec['configurations'].iteritems():
++ for _, config in spec['configurations'].items():
+ for f in config.get('msvs_tool_files', []):
+ tool_files.add(f)
+ for f in tool_files:
+@@ -1630,7 +1623,7 @@
+ # kind (i.e. C vs. C++) as the precompiled header source stub needs
+ # to have use of precompiled headers disabled.
+ extensions_excluded_from_precompile = []
+- for config_name, config in spec['configurations'].iteritems():
++ for config_name, config in spec['configurations'].items():
+ source = config.get('msvs_precompiled_source')
+ if source:
+ source = _FixPath(source)
+@@ -1651,7 +1644,7 @@
+ else:
+ basename, extension = os.path.splitext(source)
+ if extension in extensions_excluded_from_precompile:
+- for config_name, config in spec['configurations'].iteritems():
++ for config_name, config in spec['configurations'].items():
+ tool = MSVSProject.Tool('VCCLCompilerTool',
+ {'UsePrecompiledHeader': '0',
+ 'ForcedIncludeFiles': '$(NOINHERIT)'})
+@@ -1702,7 +1695,7 @@
+ return # Nothing to add
+ # Write out the user file.
+ user_file = _CreateMSVSUserFile(project_path, version, spec)
+- for config_name, c_data in spec['configurations'].iteritems():
++ for config_name, c_data in spec['configurations'].items():
+ user_file.AddDebugSettings(_ConfigFullName(config_name, c_data),
+ action, environment, working_directory)
+ user_file.WriteIfChanged()
+@@ -1756,7 +1749,7 @@
+ def _DictsToFolders(base_path, bucket, flat):
+ # Convert to folders recursively.
+ children = []
+- for folder, contents in bucket.iteritems():
++ for folder, contents in bucket.items():
+ if type(contents) == dict:
+ folder_children = _DictsToFolders(os.path.join(base_path, folder),
+ contents, flat)
+@@ -1778,8 +1771,8 @@
+ # such projects up one level.
+ if (type(node) == dict and
+ len(node) == 1 and
+- node.keys()[0] == parent + '.vcproj'):
+- return node[node.keys()[0]]
++ next(iter(node)) == parent + '.vcproj'):
++ return node[next(iter(node))]
+ if type(node) != dict:
+ return node
+ for child in node:
+@@ -1798,8 +1791,8 @@
+ # Walk down from the top until we hit a folder that has more than one entry.
+ # In practice, this strips the top-level "src/" dir from the hierarchy in
+ # the solution.
+- while len(root) == 1 and type(root[root.keys()[0]]) == dict:
+- root = root[root.keys()[0]]
++ while len(root) == 1 and type(root[next(iter(root))]) == dict:
++ root = root[next(iter(root))]
+ # Collapse singles.
+ root = _CollapseSingles('', root)
+ # Merge buckets until everything is a root entry.
+@@ -1828,7 +1821,7 @@
+ # Prepare a dict indicating which project configurations are used for which
+ # solution configurations for this target.
+ config_platform_overrides = {}
+- for config_name, c in spec['configurations'].iteritems():
++ for config_name, c in spec['configurations'].items():
+ config_fullname = _ConfigFullName(config_name, c)
+ platform = c.get('msvs_target_platform', _ConfigPlatform(c))
+ fixed_config_fullname = '%s|%s' % (
+@@ -1967,7 +1960,7 @@
+ msvs_version = params['msvs_version']
+ devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com')
+
+- for build_file, build_file_dict in data.iteritems():
++ for build_file, build_file_dict in data.items():
+ (build_file_root, build_file_ext) = os.path.splitext(build_file)
+ if build_file_ext != '.gyp':
+ continue
+@@ -1977,7 +1970,7 @@
+
+ for config in configurations:
+ arguments = [devenv, sln_path, '/Build', config]
+- print 'Building [%s]: %s' % (config, arguments)
++ print('Building [%s]: %s' % (config, arguments))
+ rtn = subprocess.check_call(arguments)
+
+
+@@ -2029,7 +2022,7 @@
+ configs = set()
+ for qualified_target in target_list:
+ spec = target_dicts[qualified_target]
+- for config_name, config in spec['configurations'].iteritems():
++ for config_name, config in spec['configurations'].items():
+ configs.add(_ConfigFullName(config_name, config))
+ configs = list(configs)
+
+@@ -2072,7 +2065,7 @@
+ if generator_flags.get('msvs_error_on_missing_sources', False):
+ raise GypError(error_message)
+ else:
+- print >> sys.stdout, "Warning: " + error_message
++ print("Warning: " + error_message, file=sys.stdout)
+
+
+ def _GenerateMSBuildFiltersFile(filters_path, source_files,
+@@ -2669,7 +2662,7 @@
+
+ def _GetMSBuildProjectConfigurations(configurations):
+ group = ['ItemGroup', {'Label': 'ProjectConfigurations'}]
+- for (name, settings) in sorted(configurations.iteritems()):
++ for (name, settings) in sorted(configurations.items()):
+ configuration, platform = _GetConfigurationAndPlatform(name, settings)
+ designation = '%s|%s' % (configuration, platform)
+ group.append(
+@@ -2742,7 +2735,7 @@
+
+ def _GetMSBuildConfigurationDetails(spec, build_file):
+ properties = {}
+- for name, settings in spec['configurations'].iteritems():
++ for name, settings in spec['configurations'].items():
+ msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file)
+ condition = _GetConfigurationCondition(name, settings)
+ character_set = msbuild_attributes.get('CharacterSet')
+@@ -2776,9 +2769,9 @@
+ user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props'
+ additional_props = {}
+ props_specified = False
+- for name, settings in sorted(configurations.iteritems()):
++ for name, settings in sorted(configurations.items()):
+ configuration = _GetConfigurationCondition(name, settings)
+- if settings.has_key('msbuild_props'):
++ if 'msbuild_props' in settings:
+ additional_props[configuration] = _FixPaths(settings['msbuild_props'])
+ props_specified = True
+ else:
+@@ -2798,7 +2791,7 @@
+ ]
+ else:
+ sheets = []
+- for condition, props in additional_props.iteritems():
++ for condition, props in additional_props.items():
+ import_group = [
+ 'ImportGroup',
+ {'Label': 'PropertySheets',
+@@ -2831,7 +2824,7 @@
+ elif a == 'ConfigurationType':
+ msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a])
+ else:
+- print 'Warning: Do not know how to convert MSVS attribute ' + a
++ print('Warning: Do not know how to convert MSVS attribute ' + a)
+ return msbuild_attributes
+
+
+@@ -2927,7 +2920,7 @@
+ new_paths = '$(ExecutablePath);' + ';'.join(new_paths)
+
+ properties = {}
+- for (name, configuration) in sorted(configurations.iteritems()):
++ for (name, configuration) in sorted(configurations.items()):
+ condition = _GetConfigurationCondition(name, configuration)
+ attributes = _GetMSBuildAttributes(spec, configuration, build_file)
+ msbuild_settings = configuration['finalized_msbuild_settings']
+@@ -2952,7 +2945,7 @@
+ _AddConditionalProperty(properties, condition, 'ExecutablePath',
+ new_paths)
+ tool_settings = msbuild_settings.get('', {})
+- for name, value in sorted(tool_settings.iteritems()):
++ for name, value in sorted(tool_settings.items()):
+ formatted_value = _GetValueFormattedForMSBuild('', name, value)
+ _AddConditionalProperty(properties, condition, name, formatted_value)
+ return _GetMSBuildPropertyGroup(spec, None, properties)
+@@ -3021,7 +3014,7 @@
+ # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
+ for name in reversed(properties_ordered):
+ values = properties[name]
+- for value, conditions in sorted(values.iteritems()):
++ for value, conditions in sorted(values.items()):
+ if len(conditions) == num_configurations:
+ # If the value is the same all configurations,
+ # just add one unconditional entry.
+@@ -3034,18 +3027,18 @@
+
+ def _GetMSBuildToolSettingsSections(spec, configurations):
+ groups = []
+- for (name, configuration) in sorted(configurations.iteritems()):
++ for (name, configuration) in sorted(configurations.items()):
+ msbuild_settings = configuration['finalized_msbuild_settings']
+ group = ['ItemDefinitionGroup',
+ {'Condition': _GetConfigurationCondition(name, configuration)}
+ ]
+- for tool_name, tool_settings in sorted(msbuild_settings.iteritems()):
++ for tool_name, tool_settings in sorted(msbuild_settings.items()):
+ # Skip the tool named '' which is a holder of global settings handled
+ # by _GetMSBuildConfigurationGlobalProperties.
+ if tool_name:
+ if tool_settings:
+ tool = [tool_name]
+- for name, value in sorted(tool_settings.iteritems()):
++ for name, value in sorted(tool_settings.items()):
+ formatted_value = _GetValueFormattedForMSBuild(tool_name, name,
+ value)
+ tool.append([name, formatted_value])
+@@ -3078,8 +3071,8 @@
+ for ignored_setting in ignored_settings:
+ value = configuration.get(ignored_setting)
+ if value:
+- print ('Warning: The automatic conversion to MSBuild does not handle '
+- '%s. Ignoring setting of %s' % (ignored_setting, str(value)))
++ print('Warning: The automatic conversion to MSBuild does not handle '
++ '%s. Ignoring setting of %s' % (ignored_setting, str(value)))
+
+ defines = [_EscapeCppDefineForMSBuild(d) for d in defines]
+ disabled_warnings = _GetDisabledWarnings(configuration)
+@@ -3245,7 +3238,7 @@
+ {'Condition': condition},
+ 'true'])
+ # Add precompile if needed
+- for config_name, configuration in spec['configurations'].iteritems():
++ for config_name, configuration in spec['configurations'].items():
+ precompiled_source = configuration.get('msvs_precompiled_source', '')
+ if precompiled_source != '':
+ precompiled_source = _FixPath(precompiled_source)
+@@ -3291,7 +3284,7 @@
+ ['Project', guid],
+ ['ReferenceOutputAssembly', 'false']
+ ]
+- for config in dependency.spec.get('configurations', {}).itervalues():
++ for config in dependency.spec.get('configurations', {}).values():
+ if config.get('msvs_use_library_dependency_inputs', 0):
+ project_ref.append(['UseLibraryDependencyInputs', 'true'])
+ break
+@@ -3360,7 +3353,7 @@
+ extension_to_rule_name)
+ missing_sources = _VerifySourcesExist(sources, project_dir)
+
+- for configuration in configurations.itervalues():
++ for configuration in configurations.values():
+ _FinalizeMSBuildSettings(spec, configuration)
+
+ # Add attributes to root element
+@@ -3486,7 +3479,7 @@
+ """
+ sources_handled_by_action = OrderedSet()
+ actions_spec = []
+- for primary_input, actions in actions_to_add.iteritems():
++ for primary_input, actions in actions_to_add.items():
+ inputs = OrderedSet()
+ outputs = OrderedSet()
+ descriptions = []
+diff --git a/tools/gyp/pylib/gyp/generator/msvs_test.py b/pylib/gyp/generator/msvs_test.py
+index c0b021d..838d236 100755
+--- a/tools/gyp/pylib/gyp/generator/msvs_test.py
++++ b/tools/gyp/pylib/gyp/generator/msvs_test.py
+@@ -7,13 +7,16 @@
+
+ import gyp.generator.msvs as msvs
+ import unittest
+-import StringIO
++try:
++ from StringIO import StringIO
++except ImportError:
++ from io import StringIO
+
+
+ class TestSequenceFunctions(unittest.TestCase):
+
+ def setUp(self):
+- self.stderr = StringIO.StringIO()
++ self.stderr = StringIO()
+
+ def test_GetLibraries(self):
+ self.assertEqual(
+diff --git a/tools/gyp/pylib/gyp/generator/ninja.py b/pylib/gyp/generator/ninja.py
+index 6de87b7..66faabc 100644
+--- a/tools/gyp/pylib/gyp/generator/ninja.py
++++ b/tools/gyp/pylib/gyp/generator/ninja.py
+@@ -2,6 +2,8 @@
+ # Use of this source code is governed by a BSD-style license that can be
+ # found in the LICENSE file.
+
++from __future__ import print_function
++
+ import collections
+ import copy
+ import hashlib
+@@ -18,7 +20,10 @@
+ import gyp.msvs_emulation
+ import gyp.MSVSUtil as MSVSUtil
+ import gyp.xcode_emulation
+-from cStringIO import StringIO
++try:
++ from cStringIO import StringIO
++except ImportError:
++ from io import StringIO
+
+ from gyp.common import GetEnvironFallback
+ import gyp.ninja_syntax as ninja_syntax
+@@ -350,7 +355,7 @@
+
+ Uses a stamp file if necessary."""
+
+- assert targets == filter(None, targets), targets
++ assert targets == [t for t in targets if t], targets
+ if len(targets) == 0:
+ assert not order_only
+ return None
+@@ -427,8 +432,8 @@
+ compile_depends.append(target.PreCompileInput())
+ if target.uses_cpp:
+ self.target.uses_cpp = True
+- actions_depends = filter(None, actions_depends)
+- compile_depends = filter(None, compile_depends)
++ actions_depends = [d for d in actions_depends if d]
++ compile_depends = [d for d in compile_depends if d]
+ actions_depends = self.WriteCollapsedDependencies('actions_depends',
+ actions_depends)
+ compile_depends = self.WriteCollapsedDependencies('compile_depends',
+@@ -455,8 +460,8 @@
+ try:
+ sources = extra_sources + spec.get('sources', [])
+ except TypeError:
+- print 'extra_sources: ', str(extra_sources)
+- print 'spec.get("sources"): ', str(spec.get('sources'))
++ print('extra_sources: ', str(extra_sources))
++ print('spec.get("sources"): ', str(spec.get('sources')))
+ raise
+ if sources:
+ if self.flavor == 'mac' and len(self.archs) > 1:
+@@ -485,8 +490,9 @@
+ if self.flavor != 'mac' or len(self.archs) == 1:
+ link_deps += [self.GypPathToNinja(o) for o in obj_outputs]
+ else:
+- print "Warning: Actions/rules writing object files don't work with " \
+- "multiarch targets, dropping. (target %s)" % spec['target_name']
++ print("Warning: Actions/rules writing object files don't work with " \
++ "multiarch targets, dropping. (target %s)" %
++ spec['target_name'])
+ elif self.flavor == 'mac' and len(self.archs) > 1:
+ link_deps = collections.defaultdict(list)
+
+@@ -838,7 +844,7 @@
+ 'XCASSETS_LAUNCH_IMAGE': 'launch-image',
+ }
+ settings = self.xcode_settings.xcode_settings[self.config_name]
+- for settings_key, arg_name in settings_to_arg.iteritems():
++ for settings_key, arg_name in settings_to_arg.items():
+ value = settings.get(settings_key)
+ if value:
+ extra_arguments[arg_name] = value
+@@ -1772,7 +1778,7 @@
+
+ # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
+ # on a 64 GB machine.
+- mem_limit = max(1, stat.ullTotalPhys / (5 * (2 ** 30))) # total / 5GB
++ mem_limit = max(1, stat.ullTotalPhys // (5 * (2 ** 30))) # total / 5GB
+ hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2**32)))
+ return min(mem_limit, hard_cap)
+ elif sys.platform.startswith('linux'):
+@@ -1784,14 +1790,14 @@
+ if not match:
+ continue
+ # Allow 8Gb per link on Linux because Gold is quite memory hungry
+- return max(1, int(match.group(1)) / (8 * (2 ** 20)))
++ return max(1, int(match.group(1)) // (8 * (2 ** 20)))
+ return 1
+ elif sys.platform == 'darwin':
+ try:
+ avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
+ # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
+ # 4GB per ld process allows for some more bloat.
+- return max(1, avail_bytes / (4 * (2 ** 30))) # total / 4GB
++ return max(1, avail_bytes // (4 * (2 ** 30))) # total / 4GB
+ except:
+ return 1
+ else:
+@@ -1946,7 +1952,7 @@
+ wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)
+
+ # Support wrappers from environment variables too.
+- for key, value in os.environ.iteritems():
++ for key, value in os.environ.items():
+ if key.lower().endswith('_wrapper'):
+ key_prefix = key[:-len('_wrapper')]
+ key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
+@@ -1966,7 +1972,7 @@
+ configs, generator_flags)
+ cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
+ toplevel_build, generator_flags, shared_system_includes, OpenOutput)
+- for arch, path in sorted(cl_paths.iteritems()):
++ for arch, path in sorted(cl_paths.items()):
+ if clang_cl:
+ # If we have selected clang-cl, use that instead.
+ path = clang_cl
+@@ -2381,6 +2387,7 @@
+
+ qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name,
+ toolset)
++ qualified_target_for_hash = qualified_target_for_hash.encode('utf-8')
+ hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest()
+
+ base_path = os.path.dirname(build_file)
+@@ -2447,7 +2454,7 @@
+ for config in configurations:
+ builddir = os.path.join(options.toplevel_dir, 'out', config)
+ arguments = ['ninja', '-C', builddir]
+- print 'Building [%s]: %s' % (config, arguments)
++ print('Building [%s]: %s' % (config, arguments))
+ subprocess.check_call(arguments)
+
+
+@@ -2475,7 +2482,7 @@
+ GenerateOutputForConfig(target_list, target_dicts, data, params,
+ user_config)
+ else:
+- config_names = target_dicts[target_list[0]]['configurations'].keys()
++ config_names = target_dicts[target_list[0]]['configurations']
+ if params['parallel']:
+ try:
+ pool = multiprocessing.Pool(len(config_names))
+@@ -2484,7 +2491,7 @@
+ arglists.append(
+ (target_list, target_dicts, data, params, config_name))
+ pool.map(CallGenerateOutputForConfig, arglists)
+- except KeyboardInterrupt, e:
++ except KeyboardInterrupt as e:
+ pool.terminate()
+ raise e
+ else:
+diff --git a/tools/gyp/pylib/gyp/generator/ninja_test.py b/pylib/gyp/generator/ninja_test.py
+index 1767b2f..1ad68e4 100644
+--- a/tools/gyp/pylib/gyp/generator/ninja_test.py
++++ b/tools/gyp/pylib/gyp/generator/ninja_test.py
+@@ -8,7 +8,6 @@
+
+ import gyp.generator.ninja as ninja
+ import unittest
+-import StringIO
+ import sys
+ import TestCommon
+
+diff --git a/tools/gyp/pylib/gyp/generator/xcode.py b/pylib/gyp/generator/xcode.py
+index b35372a..8bc22be 100644
+--- a/tools/gyp/pylib/gyp/generator/xcode.py
++++ b/tools/gyp/pylib/gyp/generator/xcode.py
+@@ -2,6 +2,8 @@
+ # Use of this source code is governed by a BSD-style license that can be
+ # found in the LICENSE file.
+
++from __future__ import print_function
++
+ import filecmp
+ import gyp.common
+ import gyp.xcodeproj_file
+@@ -129,7 +131,7 @@
+ try:
+ os.makedirs(self.path)
+ self.created_dir = True
+- except OSError, e:
++ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+@@ -183,7 +185,7 @@
+ # the tree tree view for UI display.
+ # Any values set globally are applied to all configurations, then any
+ # per-configuration values are applied.
+- for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
++ for xck, xcv in self.build_file_dict.get('xcode_settings', {}).items():
+ xccl.SetBuildSetting(xck, xcv)
+ if 'xcode_config_file' in self.build_file_dict:
+ config_ref = self.project.AddOrGetFileInRootGroup(
+@@ -197,7 +199,7 @@
+ if build_file_configuration_named:
+ xcc = xccl.ConfigurationNamed(config_name)
+ for xck, xcv in build_file_configuration_named.get('xcode_settings',
+- {}).iteritems():
++ {}).items():
+ xcc.SetBuildSetting(xck, xcv)
+ if 'xcode_config_file' in build_file_configuration_named:
+ config_ref = self.project.AddOrGetFileInRootGroup(
+@@ -273,7 +275,7 @@
+ script = script + "\n".join(
+ ['export %s="%s"' %
+ (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
+- for (key, val) in command.get('environment').iteritems()]) + "\n"
++ for (key, val) in command.get('environment').items()]) + "\n"
+
+ # Some test end up using sockets, files on disk, etc. and can get
+ # confused if more then one test runs at a time. The generator
+@@ -444,7 +446,7 @@
+ dir=self.path)
+
+ try:
+- output_file = os.fdopen(output_fd, 'wb')
++ output_file = os.fdopen(output_fd, 'w')
+
+ self.project_file.Print(output_file)
+ output_file.close()
+@@ -454,7 +456,7 @@
+ same = False
+ try:
+ same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
+- except OSError, e:
++ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+
+@@ -473,10 +475,10 @@
+ #
+ # No way to get the umask without setting a new one? Set a safe one
+ # and then set it back to the old value.
+- umask = os.umask(077)
++ umask = os.umask(0o77)
+ os.umask(umask)
+
+- os.chmod(new_pbxproj_path, 0666 & ~umask)
++ os.chmod(new_pbxproj_path, 0o666 & ~umask)
+ os.rename(new_pbxproj_path, pbxproj_path)
+
+ except Exception:
+@@ -566,7 +568,7 @@
+ def PerformBuild(data, configurations, params):
+ options = params['options']
+
+- for build_file, build_file_dict in data.iteritems():
++ for build_file, build_file_dict in data.items():
+ (build_file_root, build_file_ext) = os.path.splitext(build_file)
+ if build_file_ext != '.gyp':
+ continue
+@@ -577,7 +579,7 @@
+ for config in configurations:
+ arguments = ['xcodebuild', '-project', xcodeproj_path]
+ arguments += ['-configuration', config]
+- print "Building [%s]: %s" % (config, arguments)
++ print("Building [%s]: %s" % (config, arguments))
+ subprocess.check_call(arguments)
+
+
+@@ -625,7 +627,7 @@
+ skip_excluded_files = \
+ not generator_flags.get('xcode_list_excluded_files', True)
+ xcode_projects = {}
+- for build_file, build_file_dict in data.iteritems():
++ for build_file, build_file_dict in data.items():
+ (build_file_root, build_file_ext) = os.path.splitext(build_file)
+ if build_file_ext != '.gyp':
+ continue
+@@ -744,7 +746,7 @@
+ xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
+ try:
+ target_properties['productType'] = _types[type_bundle_key]
+- except KeyError, e:
++ except KeyError as e:
+ gyp.common.ExceptionAppend(e, "-- unknown product type while "
+ "writing target %s" % target_name)
+ raise
+@@ -1016,22 +1018,21 @@
+ makefile_name)
+ # TODO(mark): try/close? Write to a temporary file and swap it only
+ # if it's got changes?
+- makefile = open(makefile_path, 'wb')
++ makefile = open(makefile_path, 'w')
+
+ # make will build the first target in the makefile by default. By
+ # convention, it's called "all". List all (or at least one)
+ # concrete output for each rule source as a prerequisite of the "all"
+ # target.
+ makefile.write('all: \\\n')
+- for concrete_output_index in \
+- xrange(0, len(concrete_outputs_by_rule_source)):
++ for concrete_output_index, concrete_output_by_rule_source in \
++ enumerate(concrete_outputs_by_rule_source):
+ # Only list the first (index [0]) concrete output of each input
+ # in the "all" target. Otherwise, a parallel make (-j > 1) would
+ # attempt to process each input multiple times simultaneously.
+ # Otherwise, "all" could just contain the entire list of
+ # concrete_outputs_all.
+- concrete_output = \
+- concrete_outputs_by_rule_source[concrete_output_index][0]
++ concrete_output = concrete_output_by_rule_source[0]
+ if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
+ eol = ''
+ else:
+@@ -1047,8 +1048,8 @@
+ # rule source. Collect the names of the directories that are
+ # required.
+ concrete_output_dirs = []
+- for concrete_output_index in xrange(0, len(concrete_outputs)):
+- concrete_output = concrete_outputs[concrete_output_index]
++ for concrete_output_index, concrete_output in \
++ enumerate(concrete_outputs):
+ if concrete_output_index == 0:
+ bol = ''
+ else:
+@@ -1066,8 +1067,7 @@
+ # the set of additional rule inputs, if any.
+ prerequisites = [rule_source]
+ prerequisites.extend(rule.get('inputs', []))
+- for prerequisite_index in xrange(0, len(prerequisites)):
+- prerequisite = prerequisites[prerequisite_index]
++ for prerequisite_index, prerequisite in enumerate(prerequisites):
+ if prerequisite_index == len(prerequisites) - 1:
+ eol = ''
+ else:
+@@ -1279,7 +1279,7 @@
+ set_define = EscapeXcodeDefine(define)
+ xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
+ if 'xcode_settings' in configuration:
+- for xck, xcv in configuration['xcode_settings'].iteritems():
++ for xck, xcv in configuration['xcode_settings'].items():
+ xcbc.SetBuildSetting(xck, xcv)
+ if 'xcode_config_file' in configuration:
+ config_ref = pbxp.AddOrGetFileInRootGroup(
+@@ -1287,7 +1287,7 @@
+ xcbc.SetBaseConfiguration(config_ref)
+
+ build_files = []
+- for build_file, build_file_dict in data.iteritems():
++ for build_file, build_file_dict in data.items():
+ if build_file.endswith('.gyp'):
+ build_files.append(build_file)
+
+diff --git a/tools/gyp/pylib/gyp/input.py b/pylib/gyp/input.py
+index 21b4606..8ac47cb 100644
+--- a/tools/gyp/pylib/gyp/input.py
++++ b/tools/gyp/pylib/gyp/input.py
+@@ -2,8 +2,9 @@
+ # Use of this source code is governed by a BSD-style license that can be
+ # found in the LICENSE file.
+
+-import ast
++from __future__ import print_function
+
++import ast
+ import gyp.common
+ import gyp.simple_copy
+ import multiprocessing
+@@ -231,10 +232,10 @@
+ else:
+ build_file_data = eval(build_file_contents, {'__builtins__': None},
+ None)
+- except SyntaxError, e:
++ except SyntaxError as e:
+ e.filename = build_file_path
+ raise
+- except Exception, e:
++ except Exception as e:
+ gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
+ raise
+
+@@ -254,7 +255,7 @@
+ else:
+ LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
+ aux_data, None, check)
+- except Exception, e:
++ except Exception as e:
+ gyp.common.ExceptionAppend(e,
+ 'while reading includes of ' + build_file_path)
+ raise
+@@ -291,7 +292,7 @@
+ subdict_path, include)
+
+ # Recurse into subdictionaries.
+- for k, v in subdict.iteritems():
++ for k, v in subdict.items():
+ if type(v) is dict:
+ LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
+ None, check)
+@@ -456,7 +457,7 @@
+ try:
+ LoadTargetBuildFile(dependency, data, aux_data, variables,
+ includes, depth, check, load_dependencies)
+- except Exception, e:
++ except Exception as e:
+ gyp.common.ExceptionAppend(
+ e, 'while loading dependencies of %s' % build_file_path)
+ raise
+@@ -477,7 +478,7 @@
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+ # Apply globals so that the worker process behaves the same.
+- for key, value in global_flags.iteritems():
++ for key, value in global_flags.items():
+ globals()[key] = value
+
+ SetGeneratorGlobals(generator_input_info)
+@@ -499,12 +500,12 @@
+ return (build_file_path,
+ build_file_data,
+ dependencies)
+- except GypError, e:
++ except GypError as e:
+ sys.stderr.write("gyp: %s\n" % e)
+ return None
+- except Exception, e:
+- print >>sys.stderr, 'Exception:', e
+- print >>sys.stderr, traceback.format_exc()
++ except Exception as e:
++ print('Exception:', e, file=sys.stderr)
++ print(traceback.format_exc(), file=sys.stderr)
+ return None
+
+
+@@ -594,7 +595,7 @@
+ args = (global_flags, dependency,
+ variables, includes, depth, check, generator_input_info),
+ callback = parallel_state.LoadTargetBuildFileCallback)
+- except KeyboardInterrupt, e:
++ except KeyboardInterrupt as e:
+ parallel_state.pool.terminate()
+ raise e
+
+@@ -894,7 +895,7 @@
+ stderr=subprocess.PIPE,
+ stdin=subprocess.PIPE,
+ cwd=build_file_dir)
+- except Exception, e:
++ except Exception as e:
+ raise GypError("%s while executing command '%s' in %s" %
+ (e, contents, build_file))
+
+@@ -1008,9 +1009,9 @@
+
+ # Convert all strings that are canonically-represented integers into integers.
+ if type(output) is list:
+- for index in xrange(0, len(output)):
+- if IsStrCanonicalInt(output[index]):
+- output[index] = int(output[index])
++ for index, outstr in enumerate(output):
++ if IsStrCanonicalInt(outstr):
++ output[index] = int(outstr)
+ elif IsStrCanonicalInt(output):
+ output = int(output)
+
+@@ -1079,13 +1080,13 @@
+ if eval(ast_code, {'__builtins__': None}, variables):
+ return true_dict
+ return false_dict
+- except SyntaxError, e:
++ except SyntaxError as e:
+ syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
+ 'at character %d.' %
+ (str(e.args[0]), e.text, build_file, e.offset),
+ e.filename, e.lineno, e.offset, e.text)
+ raise syntax_error
+- except NameError, e:
++ except NameError as e:
+ gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
+ (cond_expr_expanded, build_file))
+ raise GypError(e)
+@@ -1140,7 +1141,7 @@
+ def LoadAutomaticVariablesFromDict(variables, the_dict):
+ # Any keys with plain string values in the_dict become automatic variables.
+ # The variable name is the key name with a "_" character prepended.
+- for key, value in the_dict.iteritems():
++ for key, value in the_dict.items():
+ if type(value) in (str, int, list):
+ variables['_' + key] = value
+
+@@ -1153,7 +1154,7 @@
+ # the_dict in the_dict's parent dict. If the_dict's parent is not a dict
+ # (it could be a list or it could be parentless because it is a root dict),
+ # the_dict_key will be None.
+- for key, value in the_dict.get('variables', {}).iteritems():
++ for key, value in the_dict.get('variables', {}).items():
+ if type(value) not in (str, int, list):
+ continue
+
+@@ -1192,7 +1193,7 @@
+ # list before we process them so that you can reference one
+ # variable from another. They will be fully expanded by recursion
+ # in ExpandVariables.
+- for key, value in the_dict['variables'].iteritems():
++ for key, value in the_dict['variables'].items():
+ variables[key] = value
+
+ # Handle the associated variables dict first, so that any variable
+@@ -1205,7 +1206,7 @@
+
+ LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
+
+- for key, value in the_dict.iteritems():
++ for key, value in the_dict.items():
+ # Skip "variables", which was already processed if present.
+ if key != 'variables' and type(value) is str:
+ expanded = ExpandVariables(value, phase, variables, build_file)
+@@ -1263,7 +1264,7 @@
+
+ # Recurse into child dicts, or process child lists which may result in
+ # further recursion into descendant dicts.
+- for key, value in the_dict.iteritems():
++ for key, value in the_dict.items():
+ # Skip "variables" and string values, which were already processed if
+ # present.
+ if key == 'variables' or type(value) is str:
+@@ -1360,14 +1361,14 @@
+ for dep in dependency_sections
+ for op in ('', '!', '/')]
+
+- for target, target_dict in targets.iteritems():
++ for target, target_dict in targets.items():
+ target_build_file = gyp.common.BuildFile(target)
+ toolset = target_dict['toolset']
+ for dependency_key in all_dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+- for index in xrange(0, len(dependencies)):
++ for index, dep in enumerate(dependencies):
+ dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
+- target_build_file, dependencies[index], toolset)
++ target_build_file, dep, toolset)
+ if not multiple_toolsets:
+ # Ignore toolset specification in the dependency if it is specified.
+ dep_toolset = toolset
+@@ -1400,7 +1401,7 @@
+ dependency list, must be qualified when this function is called.
+ """
+
+- for target, target_dict in targets.iteritems():
++ for target, target_dict in targets.items():
+ toolset = target_dict['toolset']
+ target_build_file = gyp.common.BuildFile(target)
+ for dependency_key in dependency_sections:
+@@ -1462,7 +1463,7 @@
+ def RemoveDuplicateDependencies(targets):
+ """Makes sure every dependency appears only once in all targets's dependency
+ lists."""
+- for target_name, target_dict in targets.iteritems():
++ for target_name, target_dict in targets.items():
+ for dependency_key in dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+ if dependencies:
+@@ -1478,7 +1479,7 @@
+ def RemoveSelfDependencies(targets):
+ """Remove self dependencies from targets that have the prune_self_dependency
+ variable set."""
+- for target_name, target_dict in targets.iteritems():
++ for target_name, target_dict in targets.items():
+ for dependency_key in dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+ if dependencies:
+@@ -1491,7 +1492,7 @@
+ def RemoveLinkDependenciesFromNoneTargets(targets):
+ """Remove dependencies having the 'link_dependency' attribute from the 'none'
+ targets."""
+- for target_name, target_dict in targets.iteritems():
++ for target_name, target_dict in targets.items():
+ for dependency_key in dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+ if dependencies:
+@@ -1783,14 +1784,14 @@
+ # Create a DependencyGraphNode for each target. Put it into a dict for easy
+ # access.
+ dependency_nodes = {}
+- for target, spec in targets.iteritems():
++ for target, spec in targets.items():
+ if target not in dependency_nodes:
+ dependency_nodes[target] = DependencyGraphNode(target)
+
+ # Set up the dependency links. Targets that have no dependencies are treated
+ # as dependent on root_node.
+ root_node = DependencyGraphNode(None)
+- for target, spec in targets.iteritems():
++ for target, spec in targets.items():
+ target_node = dependency_nodes[target]
+ target_build_file = gyp.common.BuildFile(target)
+ dependencies = spec.get('dependencies')
+@@ -1814,7 +1815,7 @@
+ if not root_node.dependents:
+ # If all targets have dependencies, add the first target as a dependent
+ # of root_node so that the cycle can be discovered from root_node.
+- target = targets.keys()[0]
++ target = next(iter(targets))
+ target_node = dependency_nodes[target]
+ target_node.dependencies.append(root_node)
+ root_node.dependents.append(target_node)
+@@ -1833,20 +1834,20 @@
+ # Create a DependencyGraphNode for each gyp file containing a target. Put
+ # it into a dict for easy access.
+ dependency_nodes = {}
+- for target in targets.iterkeys():
++ for target in targets.keys():
+ build_file = gyp.common.BuildFile(target)
+ if not build_file in dependency_nodes:
+ dependency_nodes[build_file] = DependencyGraphNode(build_file)
+
+ # Set up the dependency links.
+- for target, spec in targets.iteritems():
++ for target, spec in targets.items():
+ build_file = gyp.common.BuildFile(target)
+ build_file_node = dependency_nodes[build_file]
+ target_dependencies = spec.get('dependencies', [])
+ for dependency in target_dependencies:
+ try:
+ dependency_build_file = gyp.common.BuildFile(dependency)
+- except GypError, e:
++ except GypError as e:
+ gyp.common.ExceptionAppend(
+ e, 'while computing dependencies of .gyp file %s' % build_file)
+ raise
+@@ -1864,7 +1865,7 @@
+
+ # Files that have no dependencies are treated as dependent on root_node.
+ root_node = DependencyGraphNode(None)
+- for build_file_node in dependency_nodes.itervalues():
++ for build_file_node in dependency_nodes.values():
+ if len(build_file_node.dependencies) == 0:
+ build_file_node.dependencies.append(root_node)
+ root_node.dependents.append(build_file_node)
+@@ -1877,7 +1878,7 @@
+ if not root_node.dependents:
+ # If all files have dependencies, add the first file as a dependent
+ # of root_node so that the cycle can be discovered from root_node.
+- file_node = dependency_nodes.values()[0]
++ file_node = next(iter(dependency_nodes.values()))
+ file_node.dependencies.append(root_node)
+ root_node.dependents.append(file_node)
+ cycles = []
+@@ -2104,7 +2105,7 @@
+
+ def MergeDicts(to, fro, to_file, fro_file):
+ # I wanted to name the parameter "from" but it's a Python keyword...
+- for k, v in fro.iteritems():
++ for k, v in fro.items():
+ # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
+ # copy semantics. Something else may want to merge from the |fro| dict
+ # later, and having the same dict ref pointed to twice in the tree isn't
+@@ -2239,13 +2240,13 @@
+ if not 'configurations' in target_dict:
+ target_dict['configurations'] = {'Default': {}}
+ if not 'default_configuration' in target_dict:
+- concrete = [i for (i, config) in target_dict['configurations'].iteritems()
++ concrete = [i for (i, config) in target_dict['configurations'].items()
+ if not config.get('abstract')]
+ target_dict['default_configuration'] = sorted(concrete)[0]
+
+ merged_configurations = {}
+ configs = target_dict['configurations']
+- for (configuration, old_configuration_dict) in configs.iteritems():
++ for (configuration, old_configuration_dict) in configs.items():
+ # Skip abstract configurations (saves work only).
+ if old_configuration_dict.get('abstract'):
+ continue
+@@ -2253,7 +2254,7 @@
+ # Get the inheritance relationship right by making a copy of the target
+ # dict.
+ new_configuration_dict = {}
+- for (key, target_val) in target_dict.iteritems():
++ for (key, target_val) in target_dict.items():
+ key_ext = key[-1:]
+ if key_ext in key_suffixes:
+ key_base = key[:-1]
+@@ -2274,10 +2275,9 @@
+ merged_configurations[configuration])
+
+ # Now drop all the abstract ones.
+- for configuration in target_dict['configurations'].keys():
+- old_configuration_dict = target_dict['configurations'][configuration]
+- if old_configuration_dict.get('abstract'):
+- del target_dict['configurations'][configuration]
++ configs = target_dict['configurations']
++ target_dict['configurations'] = \
++ {k: v for k, v in configs.items() if not v.get('abstract')}
+
+ # Now that all of the target's configurations have been built, go through
+ # the target dict's keys and remove everything that's been moved into a
+@@ -2337,7 +2337,7 @@
+
+ lists = []
+ del_lists = []
+- for key, value in the_dict.iteritems():
++ for key, value in the_dict.items():
+ operation = key[-1]
+ if operation != '!' and operation != '/':
+ continue
+@@ -2385,8 +2385,8 @@
+ exclude_key = list_key + '!'
+ if exclude_key in the_dict:
+ for exclude_item in the_dict[exclude_key]:
+- for index in xrange(0, len(the_list)):
+- if exclude_item == the_list[index]:
++ for index, list_item in enumerate(the_list):
++ if exclude_item == list_item:
+ # This item matches the exclude_item, so set its action to 0
+ # (exclude).
+ list_actions[index] = 0
+@@ -2411,8 +2411,7 @@
+ raise ValueError('Unrecognized action ' + action + ' in ' + name + \
+ ' key ' + regex_key)
+
+- for index in xrange(0, len(the_list)):
+- list_item = the_list[index]
++ for index, list_item in enumerate(the_list):
+ if list_actions[index] == action_value:
+ # Even if the regex matches, nothing will change so continue (regex
+ # searches are expensive).
+@@ -2442,7 +2441,7 @@
+ # the indices of items that haven't been seen yet don't shift. That means
+ # that things need to be prepended to excluded_list to maintain them in the
+ # same order that they existed in the_list.
+- for index in xrange(len(list_actions) - 1, -1, -1):
++ for index in range(len(list_actions) - 1, -1, -1):
+ if list_actions[index] == 0:
+ # Dump anything with action 0 (exclude). Keep anything with action 1
+ # (include) or -1 (no include or exclude seen for the item).
+@@ -2455,7 +2454,7 @@
+ the_dict[excluded_key] = excluded_list
+
+ # Now recurse into subdicts and lists that may contain dicts.
+- for key, value in the_dict.iteritems():
++ for key, value in the_dict.items():
+ if type(value) is dict:
+ ProcessListFiltersInDict(key, value)
+ elif type(value) is list:
+@@ -2512,7 +2511,7 @@
+ basenames.setdefault(basename, []).append(source)
+
+ error = ''
+- for basename, files in basenames.iteritems():
++ for basename, files in basenames.items():
+ if len(files) > 1:
+ error += ' %s: %s\n' % (basename, ' '.join(files))
+
+@@ -2651,8 +2650,7 @@
+ def TurnIntIntoStrInList(the_list):
+ """Given list the_list, recursively converts all integers into strings.
+ """
+- for index in xrange(0, len(the_list)):
+- item = the_list[index]
++ for index, item in enumerate(the_list):
+ if type(item) is int:
+ the_list[index] = str(item)
+ elif type(item) is dict:
+@@ -2769,7 +2767,7 @@
+ try:
+ LoadTargetBuildFile(build_file, data, aux_data,
+ variables, includes, depth, check, True)
+- except Exception, e:
++ except Exception as e:
+ gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
+ raise
+
+@@ -2791,7 +2789,7 @@
+ RemoveLinkDependenciesFromNoneTargets(targets)
+
+ # Apply exclude (!) and regex (/) list filters only for dependency_sections.
+- for target_name, target_dict in targets.iteritems():
++ for target_name, target_dict in targets.items():
+ tmp_dict = {}
+ for key_base in dependency_sections:
+ for op in ('', '!', '/'):
+diff --git a/tools/gyp/pylib/gyp/input_test.py b/pylib/gyp/input_test.py
+index 4234fbb..6c4b1cc 100755
+--- a/tools/gyp/pylib/gyp/input_test.py
++++ b/tools/gyp/pylib/gyp/input_test.py
+@@ -22,7 +22,7 @@
+ dependency.dependents.append(dependent)
+
+ def test_no_cycle_empty_graph(self):
+- for label, node in self.nodes.iteritems():
++ for label, node in self.nodes.items():
+ self.assertEquals([], node.FindCycles())
+
+ def test_no_cycle_line(self):
+@@ -30,7 +30,7 @@
+ self._create_dependency(self.nodes['b'], self.nodes['c'])
+ self._create_dependency(self.nodes['c'], self.nodes['d'])
+
+- for label, node in self.nodes.iteritems():
++ for label, node in self.nodes.items():
+ self.assertEquals([], node.FindCycles())
+
+ def test_no_cycle_dag(self):
+@@ -38,7 +38,7 @@
+ self._create_dependency(self.nodes['a'], self.nodes['c'])
+ self._create_dependency(self.nodes['b'], self.nodes['c'])
+
+- for label, node in self.nodes.iteritems():
++ for label, node in self.nodes.items():
+ self.assertEquals([], node.FindCycles())
+
+ def test_cycle_self_reference(self):
+diff --git a/tools/gyp/pylib/gyp/mac_tool.py b/pylib/gyp/mac_tool.py
+index 0ad7e7a..7d3a8c2 100755
+--- a/tools/gyp/pylib/gyp/mac_tool.py
++++ b/tools/gyp/pylib/gyp/mac_tool.py
+@@ -8,6 +8,8 @@
+ These functions are executed via gyp-mac-tool when using the Makefile generator.
+ """
+
++from __future__ import print_function
++
+ import fcntl
+ import fnmatch
+ import glob
+@@ -16,7 +18,6 @@
+ import plistlib
+ import re
+ import shutil
+-import string
+ import struct
+ import subprocess
+ import sys
+@@ -155,11 +156,11 @@
+ fp.close()
+ return None
+ fp.close()
+- if header.startswith("\xFE\xFF"):
++ if header.startswith(b"\xFE\xFF"):
+ return "UTF-16"
+- elif header.startswith("\xFF\xFE"):
++ elif header.startswith(b"\xFF\xFE"):
+ return "UTF-16"
+- elif header.startswith("\xEF\xBB\xBF"):
++ elif header.startswith(b"\xEF\xBB\xBF"):
+ return "UTF-8"
+ else:
+ return None
+@@ -174,7 +175,7 @@
+ # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
+ plist = plistlib.readPlistFromString(lines)
+ if keys:
+- plist = dict(plist.items() + json.loads(keys[0]).items())
++ plist.update(json.loads(keys[0]))
+ lines = plistlib.writePlistToString(plist)
+
+ # Go through all the environment variables and replace them as variables in
+@@ -185,7 +186,7 @@
+ continue
+ evar = '${%s}' % key
+ evalue = os.environ[key]
+- lines = string.replace(lines, evar, evalue)
++ lines = lines.replace(evar, evalue)
+
+ # Xcode supports various suffices on environment variables, which are
+ # all undocumented. :rfc1034identifier is used in the standard project
+@@ -195,11 +196,11 @@
+ # in a URL either -- oops, hence :rfc1034identifier was born.
+ evar = '${%s:identifier}' % key
+ evalue = IDENT_RE.sub('_', os.environ[key])
+- lines = string.replace(lines, evar, evalue)
++ lines = lines.replace(evar, evalue)
+
+ evar = '${%s:rfc1034identifier}' % key
+ evalue = IDENT_RE.sub('-', os.environ[key])
+- lines = string.replace(lines, evar, evalue)
++ lines = lines.replace(evar, evalue)
+
+ # Remove any keys with values that haven't been replaced.
+ lines = lines.split('\n')
+@@ -270,7 +271,7 @@
+ _, err = libtoolout.communicate()
+ for line in err.splitlines():
+ if not libtool_re.match(line) and not libtool_re5.match(line):
+- print >>sys.stderr, line
++ print(line, file=sys.stderr)
+ # Unconditionally touch the output .a file on the command line if present
+ # and the command succeeded. A bit hacky.
+ if not libtoolout.returncode:
+@@ -385,7 +386,7 @@
+ ])
+ if keys:
+ keys = json.loads(keys)
+- for key, value in keys.iteritems():
++ for key, value in keys.items():
+ arg_name = '--' + key
+ if isinstance(value, bool):
+ if value:
+@@ -480,8 +481,9 @@
+ profiles_dir = os.path.join(
+ os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
+ if not os.path.isdir(profiles_dir):
+- print >>sys.stderr, (
+- 'cannot find mobile provisioning for %s' % bundle_identifier)
++ print((
++ 'cannot find mobile provisioning for %s' % bundle_identifier),
++ file=sys.stderr)
+ sys.exit(1)
+ provisioning_profiles = None
+ if profile:
+@@ -502,8 +504,9 @@
+ valid_provisioning_profiles[app_id_pattern] = (
+ profile_path, profile_data, team_identifier)
+ if not valid_provisioning_profiles:
+- print >>sys.stderr, (
+- 'cannot find mobile provisioning for %s' % bundle_identifier)
++ print((
++ 'cannot find mobile provisioning for %s' % bundle_identifier),
++ file=sys.stderr)
+ sys.exit(1)
+ # If the user has multiple provisioning profiles installed that can be
+ # used for ${bundle_identifier}, pick the most specific one (ie. the
+@@ -527,7 +530,7 @@
+
+ def _MergePlist(self, merged_plist, plist):
+ """Merge |plist| into |merged_plist|."""
+- for key, value in plist.iteritems():
++ for key, value in plist.items():
+ if isinstance(value, dict):
+ merged_value = merged_plist.get(key, {})
+ if isinstance(merged_value, dict):
+@@ -637,7 +640,7 @@
+ the key was not found.
+ """
+ if isinstance(data, str):
+- for key, value in substitutions.iteritems():
++ for key, value in substitutions.items():
+ data = data.replace('$(%s)' % key, value)
+ return data
+ if isinstance(data, list):
+diff --git a/tools/gyp/pylib/gyp/msvs_emulation.py b/pylib/gyp/msvs_emulation.py
+index 6d5b5bd..63d40e6 100644
+--- a/tools/gyp/pylib/gyp/msvs_emulation.py
++++ b/tools/gyp/pylib/gyp/msvs_emulation.py
+@@ -7,6 +7,7 @@
+ build systems, primarily ninja.
+ """
+
++import collections
+ import os
+ import re
+ import subprocess
+@@ -16,6 +17,12 @@
+ import gyp.MSVSUtil
+ import gyp.MSVSVersion
+
++try:
++ # basestring was removed in python3.
++ basestring
++except NameError:
++ basestring = str
++
+
+ windows_quoter_regex = re.compile(r'(\\*)"')
+
+@@ -84,8 +91,8 @@
+ """Add |prefix| to |element| or each subelement if element is iterable."""
+ if element is None:
+ return element
+- # Note, not Iterable because we don't want to handle strings like that.
+- if isinstance(element, list) or isinstance(element, tuple):
++ if (isinstance(element, collections.Iterable) and
++ not isinstance(element, basestring)):
+ return [prefix + e for e in element]
+ else:
+ return prefix + element
+@@ -97,7 +104,8 @@
+ if map is not None and element is not None:
+ if not callable(map):
+ map = map.get # Assume it's a dict, otherwise a callable to do the remap.
+- if isinstance(element, list) or isinstance(element, tuple):
++ if (isinstance(element, collections.Iterable) and
++ not isinstance(element, basestring)):
+ element = filter(None, [map(elem) for elem in element])
+ else:
+ element = map(element)
+@@ -109,7 +117,8 @@
+ then add |element| to it, adding each item in |element| if it's a list or
+ tuple."""
+ if append is not None and element is not None:
+- if isinstance(element, list) or isinstance(element, tuple):
++ if (isinstance(element, collections.Iterable) and
++ not isinstance(element, basestring)):
+ append.extend(element)
+ else:
+ append.append(element)
+@@ -209,7 +218,7 @@
+ configs = spec['configurations']
+ for field, default in supported_fields:
+ setattr(self, field, {})
+- for configname, config in configs.iteritems():
++ for configname, config in configs.items():
+ getattr(self, field)[configname] = config.get(field, default())
+
+ self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
+@@ -482,7 +491,7 @@
+ # https://msdn.microsoft.com/en-us/library/dn502518.aspx
+ cflags.append('/FS')
+ # ninja handles parallelism by itself, don't have the compiler do it too.
+- cflags = filter(lambda x: not x.startswith('/MP'), cflags)
++ cflags = [x for x in cflags if not x.startswith('/MP')]
+ return cflags
+
+ def _GetPchFlags(self, config, extension):
+@@ -649,19 +658,17 @@
+
+ # If the base address is not specifically controlled, DYNAMICBASE should
+ # be on by default.
+- base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
+- ldflags)
+- if not base_flags:
++ if not any('DYNAMICBASE' in flag or flag == '/FIXED' for flag in ldflags):
+ ldflags.append('/DYNAMICBASE')
+
+ # If the NXCOMPAT flag has not been specified, default to on. Despite the
+ # documentation that says this only defaults to on when the subsystem is
+ # Vista or greater (which applies to the linker), the IDE defaults it on
+ # unless it's explicitly off.
+- if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
++ if not any('NXCOMPAT' in flag for flag in ldflags):
+ ldflags.append('/NXCOMPAT')
+
+- have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
++ have_def_file = any(flag.startswith('/DEF:') for flag in ldflags)
+ manifest_flags, intermediate_manifest, manifest_files = \
+ self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
+ is_executable and not have_def_file, build_dir)
+@@ -953,7 +960,7 @@
+ """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
+ for the canonical way to retrieve a suitable dict."""
+ if '$' in string:
+- for old, new in expansions.iteritems():
++ for old, new in expansions.items():
+ assert '$(' not in new, new
+ string = string.replace(old, new)
+ return string
+@@ -1001,7 +1008,7 @@
+ CreateProcess documentation for more details."""
+ block = ''
+ nul = '\0'
+- for key, value in envvar_dict.iteritems():
++ for key, value in envvar_dict.items():
+ block += key + '=' + value + nul
+ block += nul
+ return block
+@@ -1056,7 +1063,7 @@
+ env['INCLUDE'] = ';'.join(system_includes)
+
+ env_block = _FormatAsEnvironmentBlock(env)
+- f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
++ f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'w')
+ f.write(env_block)
+ f.close()
+
+@@ -1078,7 +1085,7 @@
+ if int(generator_flags.get('msvs_error_on_missing_sources', 0)):
+ no_specials = filter(lambda x: '$' not in x, sources)
+ relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
+- missing = filter(lambda x: not os.path.exists(x), relative)
++ missing = [x for x in relative if not os.path.exists(x)]
+ if missing:
+ # They'll look like out\Release\..\..\stuff\things.cc, so normalize the
+ # path for a slightly less crazy looking output.
+diff --git a/tools/gyp/pylib/gyp/ordered_dict.py b/pylib/gyp/ordered_dict.py
+deleted file mode 100644
+index a1e89f9..0000000
+--- a/tools/gyp/pylib/gyp/ordered_dict.py
++++ /dev/null
+@@ -1,289 +0,0 @@
+-# Unmodified from http://code.activestate.com/recipes/576693/
+-# other than to add MIT license header (as specified on page, but not in code).
+-# Linked from Python documentation here:
+-# http://docs.python.org/2/library/collections.html#collections.OrderedDict
+-#
+-# This should be deleted once Py2.7 is available on all bots, see
+-# http://crbug.com/241769.
+-#
+-# Copyright (c) 2009 Raymond Hettinger.
+-#
+-# Permission is hereby granted, free of charge, to any person obtaining a copy
+-# of this software and associated documentation files (the "Software"), to deal
+-# in the Software without restriction, including without limitation the rights
+-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+-# copies of the Software, and to permit persons to whom the Software is
+-# furnished to do so, subject to the following conditions:
+-#
+-# The above copyright notice and this permission notice shall be included in
+-# all copies or substantial portions of the Software.
+-#
+-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+-# THE SOFTWARE.
+-
+-# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
+-# Passes Python2.7's test suite and incorporates all the latest updates.
+-
+-try:
+- from thread import get_ident as _get_ident
+-except ImportError:
+- from dummy_thread import get_ident as _get_ident
+-
+-try:
+- from _abcoll import KeysView, ValuesView, ItemsView
+-except ImportError:
+- pass
+-
+-
+-class OrderedDict(dict):
+- 'Dictionary that remembers insertion order'
+- # An inherited dict maps keys to values.
+- # The inherited dict provides __getitem__, __len__, __contains__, and get.
+- # The remaining methods are order-aware.
+- # Big-O running times for all methods are the same as for regular dictionaries.
+-
+- # The internal self.__map dictionary maps keys to links in a doubly linked list.
+- # The circular doubly linked list starts and ends with a sentinel element.
+- # The sentinel element never gets deleted (this simplifies the algorithm).
+- # Each link is stored as a list of length three: [PREV, NEXT, KEY].
+-
+- def __init__(self, *args, **kwds):
+- '''Initialize an ordered dictionary. Signature is the same as for
+- regular dictionaries, but keyword arguments are not recommended
+- because their insertion order is arbitrary.
+-
+- '''
+- if len(args) > 1:
+- raise TypeError('expected at most 1 arguments, got %d' % len(args))
+- try:
+- self.__root
+- except AttributeError:
+- self.__root = root = [] # sentinel node
+- root[:] = [root, root, None]
+- self.__map = {}
+- self.__update(*args, **kwds)
+-
+- def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
+- 'od.__setitem__(i, y) <==> od[i]=y'
+- # Setting a new item creates a new link which goes at the end of the linked
+- # list, and the inherited dictionary is updated with the new key/value pair.
+- if key not in self:
+- root = self.__root
+- last = root[0]
+- last[1] = root[0] = self.__map[key] = [last, root, key]
+- dict_setitem(self, key, value)
+-
+- def __delitem__(self, key, dict_delitem=dict.__delitem__):
+- 'od.__delitem__(y) <==> del od[y]'
+- # Deleting an existing item uses self.__map to find the link which is
+- # then removed by updating the links in the predecessor and successor nodes.
+- dict_delitem(self, key)
+- link_prev, link_next, key = self.__map.pop(key)
+- link_prev[1] = link_next
+- link_next[0] = link_prev
+-
+- def __iter__(self):
+- 'od.__iter__() <==> iter(od)'
+- root = self.__root
+- curr = root[1]
+- while curr is not root:
+- yield curr[2]
+- curr = curr[1]
+-
+- def __reversed__(self):
+- 'od.__reversed__() <==> reversed(od)'
+- root = self.__root
+- curr = root[0]
+- while curr is not root:
+- yield curr[2]
+- curr = curr[0]
+-
+- def clear(self):
+- 'od.clear() -> None. Remove all items from od.'
+- try:
+- for node in self.__map.itervalues():
+- del node[:]
+- root = self.__root
+- root[:] = [root, root, None]
+- self.__map.clear()
+- except AttributeError:
+- pass
+- dict.clear(self)
+-
+- def popitem(self, last=True):
+- '''od.popitem() -> (k, v), return and remove a (key, value) pair.
+- Pairs are returned in LIFO order if last is true or FIFO order if false.
+-
+- '''
+- if not self:
+- raise KeyError('dictionary is empty')
+- root = self.__root
+- if last:
+- link = root[0]
+- link_prev = link[0]
+- link_prev[1] = root
+- root[0] = link_prev
+- else:
+- link = root[1]
+- link_next = link[1]
+- root[1] = link_next
+- link_next[0] = root
+- key = link[2]
+- del self.__map[key]
+- value = dict.pop(self, key)
+- return key, value
+-
+- # -- the following methods do not depend on the internal structure --
+-
+- def keys(self):
+- 'od.keys() -> list of keys in od'
+- return list(self)
+-
+- def values(self):
+- 'od.values() -> list of values in od'
+- return [self[key] for key in self]
+-
+- def items(self):
+- 'od.items() -> list of (key, value) pairs in od'
+- return [(key, self[key]) for key in self]
+-
+- def iterkeys(self):
+- 'od.iterkeys() -> an iterator over the keys in od'
+- return iter(self)
+-
+- def itervalues(self):
+- 'od.itervalues -> an iterator over the values in od'
+- for k in self:
+- yield self[k]
+-
+- def iteritems(self):
+- 'od.iteritems -> an iterator over the (key, value) items in od'
+- for k in self:
+- yield (k, self[k])
+-
+- # Suppress 'OrderedDict.update: Method has no argument':
+- # pylint: disable=E0211
+- def update(*args, **kwds):
+- '''od.update(E, **F) -> None. Update od from dict/iterable E and F.
+-
+- If E is a dict instance, does: for k in E: od[k] = E[k]
+- If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
+- Or if E is an iterable of items, does: for k, v in E: od[k] = v
+- In either case, this is followed by: for k, v in F.items(): od[k] = v
+-
+- '''
+- if len(args) > 2:
+- raise TypeError('update() takes at most 2 positional '
+- 'arguments (%d given)' % (len(args),))
+- elif not args:
+- raise TypeError('update() takes at least 1 argument (0 given)')
+- self = args[0]
+- # Make progressively weaker assumptions about "other"
+- other = ()
+- if len(args) == 2:
+- other = args[1]
+- if isinstance(other, dict):
+- for key in other:
+- self[key] = other[key]
+- elif hasattr(other, 'keys'):
+- for key in other.keys():
+- self[key] = other[key]
+- else:
+- for key, value in other:
+- self[key] = value
+- for key, value in kwds.items():
+- self[key] = value
+-
+- __update = update # let subclasses override update without breaking __init__
+-
+- __marker = object()
+-
+- def pop(self, key, default=__marker):
+- '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+- If key is not found, d is returned if given, otherwise KeyError is raised.
+-
+- '''
+- if key in self:
+- result = self[key]
+- del self[key]
+- return result
+- if default is self.__marker:
+- raise KeyError(key)
+- return default
+-
+- def setdefault(self, key, default=None):
+- 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
+- if key in self:
+- return self[key]
+- self[key] = default
+- return default
+-
+- def __repr__(self, _repr_running={}):
+- 'od.__repr__() <==> repr(od)'
+- call_key = id(self), _get_ident()
+- if call_key in _repr_running:
+- return '...'
+- _repr_running[call_key] = 1
+- try:
+- if not self:
+- return '%s()' % (self.__class__.__name__,)
+- return '%s(%r)' % (self.__class__.__name__, self.items())
+- finally:
+- del _repr_running[call_key]
+-
+- def __reduce__(self):
+- 'Return state information for pickling'
+- items = [[k, self[k]] for k in self]
+- inst_dict = vars(self).copy()
+- for k in vars(OrderedDict()):
+- inst_dict.pop(k, None)
+- if inst_dict:
+- return (self.__class__, (items,), inst_dict)
+- return self.__class__, (items,)
+-
+- def copy(self):
+- 'od.copy() -> a shallow copy of od'
+- return self.__class__(self)
+-
+- @classmethod
+- def fromkeys(cls, iterable, value=None):
+- '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
+- and values equal to v (which defaults to None).
+-
+- '''
+- d = cls()
+- for key in iterable:
+- d[key] = value
+- return d
+-
+- def __eq__(self, other):
+- '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
+- while comparison to a regular mapping is order-insensitive.
+-
+- '''
+- if isinstance(other, OrderedDict):
+- return len(self)==len(other) and self.items() == other.items()
+- return dict.__eq__(self, other)
+-
+- def __ne__(self, other):
+- return not self == other
+-
+- # -- the following methods are only used in Python 2.7 --
+-
+- def viewkeys(self):
+- "od.viewkeys() -> a set-like object providing a view on od's keys"
+- return KeysView(self)
+-
+- def viewvalues(self):
+- "od.viewvalues() -> an object providing a view on od's values"
+- return ValuesView(self)
+-
+- def viewitems(self):
+- "od.viewitems() -> a set-like object providing a view on od's items"
+- return ItemsView(self)
+-
+diff --git a/tools/gyp/pylib/gyp/simple_copy.py b/pylib/gyp/simple_copy.py
+index eaf5f8b..58a61c3 100644
+--- a/tools/gyp/pylib/gyp/simple_copy.py
++++ b/tools/gyp/pylib/gyp/simple_copy.py
+@@ -49,7 +49,7 @@
+
+ def _deepcopy_dict(x):
+ y = {}
+- for key, value in x.iteritems():
++ for key, value in x.items():
+ y[deepcopy(key)] = deepcopy(value)
+ return y
+ d[dict] = _deepcopy_dict
+diff --git a/tools/gyp/pylib/gyp/win_tool.py b/pylib/gyp/win_tool.py
+index 1c843a0..8973484 100755
+--- a/tools/gyp/pylib/gyp/win_tool.py
++++ b/tools/gyp/pylib/gyp/win_tool.py
+@@ -9,6 +9,8 @@
+ These functions are executed via gyp-win-tool when using the ninja generator.
+ """
+
++from __future__ import print_function
++
+ import os
+ import re
+ import shutil
+@@ -134,7 +136,7 @@
+ if (not line.startswith(' Creating library ') and
+ not line.startswith('Generating code') and
+ not line.startswith('Finished generating code')):
+- print line
++ print(line)
+ return link.returncode
+
+ def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname,
+@@ -193,16 +195,18 @@
+ our_manifest = '%(out)s.manifest' % variables
+ # Load and normalize the manifests. mt.exe sometimes removes whitespace,
+ # and sometimes doesn't unfortunately.
+- with open(our_manifest, 'rb') as our_f:
+- with open(assert_manifest, 'rb') as assert_f:
++ with open(our_manifest, 'r') as our_f:
++ with open(assert_manifest, 'r') as assert_f:
+ our_data = our_f.read().translate(None, string.whitespace)
+ assert_data = assert_f.read().translate(None, string.whitespace)
+ if our_data != assert_data:
+ os.unlink(out)
+ def dump(filename):
+- sys.stderr.write('%s\n-----\n' % filename)
+- with open(filename, 'rb') as f:
+- sys.stderr.write(f.read() + '\n-----\n')
++ print(filename, file=sys.stderr)
++ print('-----', file=sys.stderr)
++ with open(filename, 'r') as f:
++ print(f.read(), file=sys.stderr)
++ print('-----', file=sys.stderr)
+ dump(intermediate_manifest)
+ dump(our_manifest)
+ dump(assert_manifest)
+@@ -223,7 +227,7 @@
+ out, _ = popen.communicate()
+ for line in out.splitlines():
+ if line and 'manifest authoring warning 81010002' not in line:
+- print line
++ print(line)
+ return popen.returncode
+
+ def ExecManifestToRc(self, arch, *args):
+@@ -231,7 +235,7 @@
+ |args| is tuple containing path to resource file, path to manifest file
+ and resource name which can be "1" (for executables) or "2" (for DLLs)."""
+ manifest_path, resource_path, resource_name = args
+- with open(resource_path, 'wb') as output:
++ with open(resource_path, 'w') as output:
+ output.write('#include <windows.h>\n%s RT_MANIFEST "%s"' % (
+ resource_name,
+ os.path.abspath(manifest_path).replace('\\', '/')))
+@@ -263,7 +267,7 @@
+ for x in lines if x.startswith(prefixes))
+ for line in lines:
+ if not line.startswith(prefixes) and line not in processing:
+- print line
++ print(line)
+ return popen.returncode
+
+ def ExecAsmWrapper(self, arch, *args):
+@@ -277,7 +281,7 @@
+ not line.startswith('Microsoft (R) Macro Assembler') and
+ not line.startswith(' Assembling: ') and
+ line):
+- print line
++ print(line)
+ return popen.returncode
+
+ def ExecRcWrapper(self, arch, *args):
+@@ -291,7 +295,7 @@
+ if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and
+ not line.startswith('Copyright (C) Microsoft Corporation') and
+ line):
+- print line
++ print(line)
+ return popen.returncode
+
+ def ExecActionWrapper(self, arch, rspfile, *dir):
+@@ -300,7 +304,7 @@
+ env = self._GetEnv(arch)
+ # TODO(scottmg): This is a temporary hack to get some specific variables
+ # through to actions that are set after gyp-time. http://crbug.com/333738.
+- for k, v in os.environ.iteritems():
++ for k, v in os.environ.items():
+ if k not in env:
+ env[k] = v
+ args = open(rspfile).read()
+diff --git a/tools/gyp/pylib/gyp/xcode_emulation.py b/pylib/gyp/xcode_emulation.py
+index dba8e76..4c875de 100644
+--- a/tools/gyp/pylib/gyp/xcode_emulation.py
++++ b/tools/gyp/pylib/gyp/xcode_emulation.py
+@@ -7,6 +7,8 @@
+ other build systems, such as make and ninja.
+ """
+
++from __future__ import print_function
++
+ import copy
+ import gyp.common
+ import os
+@@ -73,7 +75,7 @@
+ if arch not in expanded_archs:
+ expanded_archs.append(arch)
+ except KeyError as e:
+- print 'Warning: Ignoring unsupported variable "%s".' % variable
++ print('Warning: Ignoring unsupported variable "%s".' % variable)
+ elif arch not in expanded_archs:
+ expanded_archs.append(arch)
+ return expanded_archs
+@@ -171,7 +173,7 @@
+ # the same for all configs are implicitly per-target settings.
+ self.xcode_settings = {}
+ configs = spec['configurations']
+- for configname, config in configs.iteritems():
++ for configname, config in configs.items():
+ self.xcode_settings[configname] = config.get('xcode_settings', {})
+ self._ConvertConditionalKeys(configname)
+ if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
+@@ -197,8 +199,8 @@
+ new_key = key.split("[")[0]
+ settings[new_key] = settings[key]
+ else:
+- print 'Warning: Conditional keys not implemented, ignoring:', \
+- ' '.join(conditional_keys)
++ print('Warning: Conditional keys not implemented, ignoring:', \
++ ' '.join(conditional_keys))
+ del settings[key]
+
+ def _Settings(self):
+@@ -216,7 +218,7 @@
+
+ def _WarnUnimplemented(self, test_key):
+ if test_key in self._Settings():
+- print 'Warning: Ignoring not yet implemented key "%s".' % test_key
++ print('Warning: Ignoring not yet implemented key "%s".' % test_key)
+
+ def IsBinaryOutputFormat(self, configname):
+ default = "binary" if self.isIOS else "xml"
+@@ -963,7 +965,7 @@
+ result = dict(self.xcode_settings[configname])
+ first_pass = False
+ else:
+- for key, value in self.xcode_settings[configname].iteritems():
++ for key, value in self.xcode_settings[configname].items():
+ if key not in result:
+ continue
+ elif result[key] != value:
+@@ -1084,8 +1086,8 @@
+ unimpl = ['OTHER_CODE_SIGN_FLAGS']
+ unimpl = set(unimpl) & set(self.xcode_settings[configname].keys())
+ if unimpl:
+- print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
+- ', '.join(sorted(unimpl)))
++ print('Warning: Some codesign keys not implemented, ignoring: %s' % (
++ ', '.join(sorted(unimpl))))
+
+ if self._IsXCTest():
+ # For device xctests, Xcode copies two extra frameworks into $TEST_HOST.
+@@ -1737,7 +1739,7 @@
+ order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
+ order.reverse()
+ return order
+- except gyp.common.CycleError, e:
++ except gyp.common.CycleError as e:
+ raise GypError(
+ 'Xcode environment variables are cyclically dependent: ' + str(e.nodes))
+
+@@ -1774,10 +1776,11 @@
+ def _AddIOSDeviceConfigurations(targets):
+ """Clone all targets and append -iphoneos to the name. Configure these targets
+ to build for iOS devices and use correct architectures for those builds."""
+- for target_dict in targets.itervalues():
++ for target_dict in targets.values():
+ toolset = target_dict['toolset']
+ configs = target_dict['configurations']
+- for config_name, simulator_config_dict in dict(configs).iteritems():
++
++ for config_name, simulator_config_dict in dict(configs).items():
+ iphoneos_config_dict = copy.deepcopy(simulator_config_dict)
+ configs[config_name + '-iphoneos'] = iphoneos_config_dict
+ configs[config_name + '-iphonesimulator'] = simulator_config_dict
+diff --git a/tools/gyp/pylib/gyp/xcode_ninja.py b/pylib/gyp/xcode_ninja.py
+index bc76fff..1d71b8c 100644
+--- a/tools/gyp/pylib/gyp/xcode_ninja.py
++++ b/tools/gyp/pylib/gyp/xcode_ninja.py
+@@ -28,7 +28,7 @@
+ workspace_path = os.path.join(options.generator_output, workspace_path)
+ try:
+ os.makedirs(workspace_path)
+- except OSError, e:
++ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+ output_string = '<?xml version="1.0" encoding="UTF-8"?>\n' + \
+@@ -85,7 +85,7 @@
+ "%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel
+
+ if 'configurations' in old_spec:
+- for config in old_spec['configurations'].iterkeys():
++ for config in old_spec['configurations'].keys():
+ old_xcode_settings = \
+ old_spec['configurations'][config].get('xcode_settings', {})
+ if 'IPHONEOS_DEPLOYMENT_TARGET' in old_xcode_settings:
+@@ -167,7 +167,7 @@
+ params: Dict of global options for gyp.
+ """
+ orig_gyp = params['build_files'][0]
+- for gyp_name, gyp_dict in data.iteritems():
++ for gyp_name, gyp_dict in data.items():
+ if gyp_name == orig_gyp:
+ depth = gyp_dict['_DEPTH']
+
+@@ -238,7 +238,7 @@
+ not generator_flags.get('xcode_ninja_list_excluded_files', True)
+
+ sources = []
+- for target, target_dict in target_dicts.iteritems():
++ for target, target_dict in target_dicts.items():
+ base = os.path.dirname(target)
+ files = target_dict.get('sources', []) + \
+ target_dict.get('mac_bundle_resources', [])
+diff --git a/tools/gyp/pylib/gyp/xcodeproj_file.py b/pylib/gyp/xcodeproj_file.py
+index e69235f..bd238f6 100644
+--- a/tools/gyp/pylib/gyp/xcodeproj_file.py
++++ b/tools/gyp/pylib/gyp/xcodeproj_file.py
+@@ -154,6 +154,11 @@
+ import sha
+ _new_sha1 = sha.new
+
++try:
++ # basestring was removed in python3.
++ basestring
++except NameError:
++ basestring = str
+
+ # See XCObject._EncodeString. This pattern is used to determine when a string
+ # can be printed unquoted. Strings that match this pattern may be printed
+@@ -314,7 +319,7 @@
+ """
+
+ that = self.__class__(id=self.id, parent=self.parent)
+- for key, value in self._properties.iteritems():
++ for key, value in self._properties.items():
+ is_strong = self._schema[key][2]
+
+ if isinstance(value, XCObject):
+@@ -324,8 +329,7 @@
+ that._properties[key] = new_value
+ else:
+ that._properties[key] = value
+- elif isinstance(value, str) or isinstance(value, unicode) or \
+- isinstance(value, int):
++ elif isinstance(value, basestring) or isinstance(value, int):
+ that._properties[key] = value
+ elif isinstance(value, list):
+ if is_strong:
+@@ -449,10 +453,10 @@
+ # is 160 bits. Instead of throwing out 64 bits of the digest, xor them
+ # into the portion that gets used.
+ assert hash.digest_size % 4 == 0
+- digest_int_count = hash.digest_size / 4
++ digest_int_count = hash.digest_size // 4
+ digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest())
+ id_ints = [0, 0, 0]
+- for index in xrange(0, digest_int_count):
++ for index in range(0, digest_int_count):
+ id_ints[index % 3] ^= digest_ints[index]
+ self.id = '%08X%08X%08X' % tuple(id_ints)
+
+@@ -475,7 +479,7 @@
+ """Returns a list of all of this object's owned (strong) children."""
+
+ children = []
+- for property, attributes in self._schema.iteritems():
++ for property, attributes in self._schema.items():
+ (is_list, property_type, is_strong) = attributes[0:3]
+ if is_strong and property in self._properties:
+ if not is_list:
+@@ -603,7 +607,12 @@
+ comment = value.Comment()
+ elif isinstance(value, str):
+ printable += self._EncodeString(value)
+- elif isinstance(value, unicode):
++ # A python3 compatible way of saying isinstance(value, unicode).
++ # basestring is str in python3 so this is equivalent to the above
++ # isinstance. Thus if it failed above it will fail here.
++ # In python2 we test against str and unicode at this point. str has already
++ # failed in the above isinstance so we test against unicode.
++ elif isinstance(value, basestring):
+ printable += self._EncodeString(value.encode('utf-8'))
+ elif isinstance(value, int):
+ printable += str(value)
+@@ -622,7 +631,7 @@
+ printable += end_tabs + ')'
+ elif isinstance(value, dict):
+ printable = '{' + sep
+- for item_key, item_value in sorted(value.iteritems()):
++ for item_key, item_value in sorted(value.items()):
+ printable += element_tabs + \
+ self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \
+ self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \
+@@ -691,7 +700,7 @@
+ printable_value[0] == '"' and printable_value[-1] == '"':
+ printable_value = printable_value[1:-1]
+ printable += printable_key + ' = ' + printable_value + ';' + after_kv
+- except TypeError, e:
++ except TypeError as e:
+ gyp.common.ExceptionAppend(e,
+ 'while printing key "%s"' % key)
+ raise
+@@ -730,7 +739,7 @@
+ self._XCKVPrint(file, 3, 'isa', self.__class__.__name__)
+
+ # The remaining elements of an object dictionary are sorted alphabetically.
+- for property, value in sorted(self._properties.iteritems()):
++ for property, value in sorted(self._properties.items()):
+ self._XCKVPrint(file, 3, property, value)
+
+ # End the object.
+@@ -752,7 +761,7 @@
+ if properties is None:
+ return
+
+- for property, value in properties.iteritems():
++ for property, value in properties.items():
+ # Make sure the property is in the schema.
+ if not property in self._schema:
+ raise KeyError(property + ' not in ' + self.__class__.__name__)
+@@ -766,7 +775,7 @@
+ ' must be list, not ' + value.__class__.__name__)
+ for item in value:
+ if not isinstance(item, property_type) and \
+- not (item.__class__ == unicode and property_type == str):
++ not (isinstance(item, basestring) and property_type == str):
+ # Accept unicode where str is specified. str is treated as
+ # UTF-8-encoded.
+ raise TypeError(
+@@ -774,7 +783,7 @@
+ ' must be ' + property_type.__name__ + ', not ' + \
+ item.__class__.__name__)
+ elif not isinstance(value, property_type) and \
+- not (value.__class__ == unicode and property_type == str):
++ not (isinstance(value, basestring) and property_type == str):
+ # Accept unicode where str is specified. str is treated as
+ # UTF-8-encoded.
+ raise TypeError(
+@@ -788,8 +797,7 @@
+ self._properties[property] = value.Copy()
+ else:
+ self._properties[property] = value
+- elif isinstance(value, str) or isinstance(value, unicode) or \
+- isinstance(value, int):
++ elif isinstance(value, basestring) or isinstance(value, int):
+ self._properties[property] = value
+ elif isinstance(value, list):
+ if is_strong:
+@@ -865,7 +873,7 @@
+
+ # TODO(mark): A stronger verification mechanism is needed. Some
+ # subclasses need to perform validation beyond what the schema can enforce.
+- for property, attributes in self._schema.iteritems():
++ for property, attributes in self._schema.items():
+ (is_list, property_type, is_strong, is_required) = attributes[0:4]
+ if is_required and not property in self._properties:
+ raise KeyError(self.__class__.__name__ + ' requires ' + property)
+@@ -875,7 +883,7 @@
+ overwrite properties that have already been set."""
+
+ defaults = {}
+- for property, attributes in self._schema.iteritems():
++ for property, attributes in self._schema.items():
+ (is_list, property_type, is_strong, is_required) = attributes[0:4]
+ if is_required and len(attributes) >= 5 and \
+ not property in self._properties:
+@@ -1426,8 +1434,8 @@
+ xche = self
+ while xche != None and isinstance(xche, XCHierarchicalElement):
+ xche_hashables = xche.Hashables()
+- for index in xrange(0, len(xche_hashables)):
+- hashables.insert(index, xche_hashables[index])
++ for index, xche_hashable in enumerate(xche_hashables):
++ hashables.insert(index, xche_hashable)
+ xche = xche.parent
+ return hashables
+
+@@ -2468,8 +2476,7 @@
+ # The headers phase should come before the resources, sources, and
+ # frameworks phases, if any.
+ insert_at = len(self._properties['buildPhases'])
+- for index in xrange(0, len(self._properties['buildPhases'])):
+- phase = self._properties['buildPhases'][index]
++ for index, phase in enumerate(self._properties['buildPhases']):
+ if isinstance(phase, PBXResourcesBuildPhase) or \
+ isinstance(phase, PBXSourcesBuildPhase) or \
+ isinstance(phase, PBXFrameworksBuildPhase):
+@@ -2489,8 +2496,7 @@
+ # The resources phase should come before the sources and frameworks
+ # phases, if any.
+ insert_at = len(self._properties['buildPhases'])
+- for index in xrange(0, len(self._properties['buildPhases'])):
+- phase = self._properties['buildPhases'][index]
++ for index, phase in enumerate(self._properties['buildPhases']):
+ if isinstance(phase, PBXSourcesBuildPhase) or \
+ isinstance(phase, PBXFrameworksBuildPhase):
+ insert_at = index
+@@ -2911,7 +2917,7 @@
+ # determine the sort order.
+ return cmp(x_index, y_index)
+
+- for other_pbxproject, ref_dict in self._other_pbxprojects.iteritems():
++ for other_pbxproject, ref_dict in self._other_pbxprojects.items():
+ # Build up a list of products in the remote project file, ordered the
+ # same as the targets that produce them.
+ remote_products = []
+diff --git a/tools/gyp/pylib/gyp/xml_fix.py b/pylib/gyp/xml_fix.py
+index 5de8481..4308d99 100644
+--- a/tools/gyp/pylib/gyp/xml_fix.py
++++ b/tools/gyp/pylib/gyp/xml_fix.py
+@@ -32,8 +32,7 @@
+ writer.write(indent+"<" + self.tagName)
+
+ attrs = self._get_attributes()
+- a_names = attrs.keys()
+- a_names.sort()
++ a_names = sorted(attrs.keys())
+
+ for a_name in a_names:
+ writer.write(" %s=\"" % a_name)
+diff --git a/tools/gyp/tools/graphviz.py b/tools/graphviz.py
+index 326ae22..538b059 100755
+--- a/tools/gyp/tools/graphviz.py
++++ b/tools/gyp/tools/graphviz.py
+@@ -8,6 +8,8 @@
+ generate input suitable for graphviz to render a dependency graph of
+ targets."""
+
++from __future__ import print_function
++
+ import collections
+ import json
+ import sys
+@@ -50,9 +52,9 @@
+ build_file, target_name, toolset = ParseTarget(src)
+ files[build_file].append(src)
+
+- print 'digraph D {'
+- print ' fontsize=8' # Used by subgraphs.
+- print ' node [fontsize=8]'
++ print('digraph D {')
++ print(' fontsize=8') # Used by subgraphs.
++ print(' node [fontsize=8]')
+
+ # Output nodes by file. We must first write out each node within
+ # its file grouping before writing out any edges that may refer
+@@ -63,31 +65,31 @@
+ # the display by making it a box without an internal node.
+ target = targets[0]
+ build_file, target_name, toolset = ParseTarget(target)
+- print ' "%s" [shape=box, label="%s\\n%s"]' % (target, filename,
+- target_name)
++ print(' "%s" [shape=box, label="%s\\n%s"]' % (target, filename,
++ target_name))
+ else:
+ # Group multiple nodes together in a subgraph.
+- print ' subgraph "cluster_%s" {' % filename
+- print ' label = "%s"' % filename
++ print(' subgraph "cluster_%s" {' % filename)
++ print(' label = "%s"' % filename)
+ for target in targets:
+ build_file, target_name, toolset = ParseTarget(target)
+- print ' "%s" [label="%s"]' % (target, target_name)
+- print ' }'
++ print(' "%s" [label="%s"]' % (target, target_name))
++ print(' }')
+
+ # Now that we've placed all the nodes within subgraphs, output all
+ # the edges between nodes.
+ for src, dsts in edges.items():
+ for dst in dsts:
+- print ' "%s" -> "%s"' % (src, dst)
++ print(' "%s" -> "%s"' % (src, dst))
+
+- print '}'
++ print('}')
+
+
+ def main():
+ if len(sys.argv) < 2:
+- print >>sys.stderr, __doc__
+- print >>sys.stderr
+- print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0])
++ print(__doc__, file=sys.stderr)
++ print(file=sys.stderr)
++ print('usage: %s target1 target2...' % (sys.argv[0]), file=sys.stderr)
+ return 1
+
+ edges = LoadEdges('dump.json', sys.argv[1:])
+diff --git a/tools/gyp/tools/pretty_gyp.py b/tools/pretty_gyp.py
+index d5736bb..5060d1d 100755
+--- a/tools/gyp/tools/pretty_gyp.py
++++ b/tools/gyp/tools/pretty_gyp.py
+@@ -6,6 +6,8 @@
+
+ """Pretty-prints the contents of a GYP file."""
+
++from __future__ import print_function
++
+ import sys
+ import re
+
+@@ -125,15 +127,15 @@
+ (brace_diff, after) = count_braces(line)
+ if brace_diff != 0:
+ if after:
+- print " " * (basic_offset * indent) + line
++ print(" " * (basic_offset * indent) + line)
+ indent += brace_diff
+ else:
+ indent += brace_diff
+- print " " * (basic_offset * indent) + line
++ print(" " * (basic_offset * indent) + line)
+ else:
+- print " " * (basic_offset * indent) + line
++ print(" " * (basic_offset * indent) + line)
+ else:
+- print ""
++ print("")
+ last_line = line
+
+
+diff --git a/tools/gyp/tools/pretty_sln.py b/tools/pretty_sln.py
+index ca8cf4a..12a6dad 100755
+--- a/tools/gyp/tools/pretty_sln.py
++++ b/tools/gyp/tools/pretty_sln.py
+@@ -12,6 +12,8 @@
+ Then it outputs a possible build order.
+ """
+
++from __future__ import print_function
++
+ __author__ = 'nsylvain (Nicolas Sylvain)'
+
+ import os
+@@ -26,7 +28,7 @@
+ for dep in deps[project]:
+ if dep not in built:
+ BuildProject(dep, built, projects, deps)
+- print project
++ print(project)
+ built.append(project)
+
+ def ParseSolution(solution_file):
+@@ -100,44 +102,44 @@
+ return (projects, dependencies)
+
+ def PrintDependencies(projects, deps):
+- print "---------------------------------------"
+- print "Dependencies for all projects"
+- print "---------------------------------------"
+- print "-- --"
++ print("---------------------------------------")
++ print("Dependencies for all projects")
++ print("---------------------------------------")
++ print("-- --")
+
+ for (project, dep_list) in sorted(deps.items()):
+- print "Project : %s" % project
+- print "Path : %s" % projects[project][0]
++ print("Project : %s" % project)
++ print("Path : %s" % projects[project][0])
+ if dep_list:
+ for dep in dep_list:
+- print " - %s" % dep
+- print ""
++ print(" - %s" % dep)
++ print("")
+
+- print "-- --"
++ print("-- --")
+
+ def PrintBuildOrder(projects, deps):
+- print "---------------------------------------"
+- print "Build order "
+- print "---------------------------------------"
+- print "-- --"
++ print("---------------------------------------")
++ print("Build order ")
++ print("---------------------------------------")
++ print("-- --")
+
+ built = []
+ for (project, _) in sorted(deps.items()):
+ if project not in built:
+ BuildProject(project, built, projects, deps)
+
+- print "-- --"
++ print("-- --")
+
+ def PrintVCProj(projects):
+
+ for project in projects:
+- print "-------------------------------------"
+- print "-------------------------------------"
+- print project
+- print project
+- print project
+- print "-------------------------------------"
+- print "-------------------------------------"
++ print("-------------------------------------")
++ print("-------------------------------------")
++ print(project)
++ print(project)
++ print(project)
++ print("-------------------------------------")
++ print("-------------------------------------")
+
+ project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
+ projects[project][2]))
+@@ -153,7 +155,7 @@
+ def main():
+ # check if we have exactly 1 parameter.
+ if len(sys.argv) < 2:
+- print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
++ print('Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0])
+ return 1
+
+ (projects, deps) = ParseSolution(sys.argv[1])
+diff --git a/tools/gyp/tools/pretty_vcproj.py b/tools/pretty_vcproj.py
+index 6099bd7..f02e59e 100755
+--- a/tools/gyp/tools/pretty_vcproj.py
++++ b/tools/gyp/tools/pretty_vcproj.py
+@@ -12,6 +12,8 @@
+ It outputs the resulting xml to stdout.
+ """
+
++from __future__ import print_function
++
+ __author__ = 'nsylvain (Nicolas Sylvain)'
+
+ import os
+@@ -73,23 +75,23 @@
+
+ # Print the main tag
+ if attr_count == 0:
+- print '%s<%s>' % (' '*indent, node.nodeName)
++ print('%s<%s>' % (' '*indent, node.nodeName))
+ else:
+- print '%s<%s' % (' '*indent, node.nodeName)
++ print('%s<%s' % (' '*indent, node.nodeName))
+
+ all_attributes = []
+ for (name, value) in node.attributes.items():
+ all_attributes.append((name, value))
+- all_attributes.sort(CmpTuple())
++ all_attributes.sort(key=(lambda attr: attr[0]))
+ for (name, value) in all_attributes:
+- print '%s %s="%s"' % (' '*indent, name, value)
+- print '%s>' % (' '*indent)
++ print('%s %s="%s"' % (' '*indent, name, value))
++ print('%s>' % (' '*indent))
+ if node.nodeValue:
+- print '%s %s' % (' '*indent, node.nodeValue)
++ print('%s %s' % (' '*indent, node.nodeValue))
+
+ for sub_node in node.childNodes:
+ PrettyPrintNode(sub_node, indent=indent+2)
+- print '%s</%s>' % (' '*indent, node.nodeName)
++ print('%s</%s>' % (' '*indent, node.nodeName))
+
+
+ def FlattenFilter(node):
+@@ -283,8 +285,8 @@
+
+ # check if we have exactly 1 parameter.
+ if len(argv) < 2:
+- print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
+- '[key2=value2]' % argv[0])
++ print('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
++ '[key2=value2]' % argv[0])
+ return 1
+
+ # Parse the keys
+diff --git a/tools/gyp/gyptest.py b/gyptest.py
+index 9930e78..1a9ffca 100755
+--- a/tools/gyp/gyptest.py
++++ b/tools/gyp/gyptest.py
+@@ -58,7 +58,7 @@
+ os.chdir(args.chdir)
+
+ if args.path:
+- extra_path = [os.path.abspath(p) for p in opts.path]
++ extra_path = [os.path.abspath(p) for p in args.path]
+ extra_path = os.pathsep.join(extra_path)
+ os.environ['PATH'] = extra_path + os.pathsep + os.environ['PATH']
+
+diff --git a/tools/gyp/pylib/gyp/MSVSNew.py b/pylib/gyp/MSVSNew.py
+index 593f0e5..0445931 100644
+--- a/tools/gyp/pylib/gyp/MSVSNew.py
++++ b/tools/gyp/pylib/gyp/MSVSNew.py
+@@ -21,6 +21,13 @@
+ _new_md5 = md5.new
+
+
++try:
++ # cmp was removed in python3.
++ cmp
++except NameError:
++ def cmp(a, b):
++ return (a > b) - (a < b)
++
+ # Initialize random number generator
+ random.seed()
+
+diff --git a/tools/gyp/pylib/gyp/common.py b/pylib/gyp/common.py
+index 1823de8..b268d22 100644
+--- a/tools/gyp/pylib/gyp/common.py
++++ b/tools/gyp/pylib/gyp/common.py
+@@ -584,7 +584,7 @@
+ graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
+ def GetEdges(node):
+ return re.findall(r'\$\(([^))]\)', graph[node])
+- print TopologicallySorted(graph.keys(), GetEdges)
++ print(TopologicallySorted(graph.keys(), GetEdges))
+ ==>
+ ['a', 'c', b']
+ """
+diff --git a/tools/gyp/pylib/gyp/generator/make.py b/pylib/gyp/generator/make.py
+index 2057e3a..8c2827e 100644
+--- a/tools/gyp/pylib/gyp/generator/make.py
++++ b/tools/gyp/pylib/gyp/generator/make.py
+@@ -1636,7 +1636,7 @@
+ self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all,
+ postbuilds=postbuilds)
+ else:
+- print("WARNING: no output for", self.type, target)
++ print("WARNING: no output for", self.type, self.target)
+
+ # Add an alias for each target (if there are any outputs).
+ # Installable target aliases are created below.
+diff --git a/tools/gyp/pylib/gyp/generator/msvs.py b/pylib/gyp/generator/msvs.py
+index e8a2b36..9eac028 100644
+--- a/tools/gyp/pylib/gyp/generator/msvs.py
++++ b/tools/gyp/pylib/gyp/generator/msvs.py
+@@ -308,10 +308,8 @@
+ if names:
+ return names[0]
+ else:
+- print >> sys.stdout, (
+- 'Warning: No include files found for '
+- 'detected Windows SDK version %s' % (version)
+- )
++ print('Warning: No include files found for '
++ 'detected Windows SDK version %s' % (version))
+
+
+ def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
+@@ -2065,7 +2063,7 @@
+ if generator_flags.get('msvs_error_on_missing_sources', False):
+ raise GypError(error_message)
+ else:
+- print("Warning: " + error_message, file=sys.stdout)
++ print("Warning: " + error_message)
+
+
+ def _GenerateMSBuildFiltersFile(filters_path, source_files,
+diff --git a/tools/gyp/pylib/gyp/mac_tool.py b/pylib/gyp/mac_tool.py
+index 7d3a8c2..84f8863 100755
+--- a/tools/gyp/pylib/gyp/mac_tool.py
++++ b/tools/gyp/pylib/gyp/mac_tool.py
+@@ -670,7 +670,7 @@
+ count = len(filelist)
+ capacity = NextGreaterPowerOf2(count)
+ strings_offset = 24 + (12 * capacity)
+- max_value_length = len(max(filelist.items(), key=lambda (k,v):len(v))[1])
++ max_value_length = len(max(filelist.items(), key=lambda t: len(t[1]))[1])
+
+ out = open(output_name, "wb")
+ out.write(struct.pack('<LHHLLLL', magic, version, _reserved, strings_offset,
+diff --git a/tools/gyp/pylib/gyp/xcodeproj_file.py b/pylib/gyp/xcodeproj_file.py
+index bd238f6..bc9814d 100644
+--- a/tools/gyp/pylib/gyp/xcodeproj_file.py
++++ b/tools/gyp/pylib/gyp/xcodeproj_file.py
+@@ -160,6 +160,13 @@
+ except NameError:
+ basestring = str
+
++try:
++ # cmp was removed in python3.
++ cmp
++except NameError:
++ def cmp(a, b):
++ return (a > b) - (a < b)
++
+ # See XCObject._EncodeString. This pattern is used to determine when a string
+ # can be printed unquoted. Strings that match this pattern may be printed
+ # unquoted. Strings that do not match must be quoted and may be further
+diff --git a/tools/gyp/samples/samples b/samples/samples
+index 804b618..ff26de3 100755
+--- a/tools/gyp/samples/samples
++++ b/tools/gyp/samples/samples
+@@ -4,6 +4,8 @@
+ # Use of this source code is governed by a BSD-style license that can be
+ # found in the LICENSE file.
+
++from __future__ import print_function
++
+ import os.path
+ import shutil
+ import sys
+@@ -57,7 +59,7 @@
+
+ def Main(argv):
+ if len(argv) != 3 or argv[1] not in ['push', 'pull']:
+- print 'Usage: %s push/pull PATH_TO_CHROME' % argv[0]
++ print('Usage: %s push/pull PATH_TO_CHROME' % argv[0])
+ return 1
+
+ path_to_chrome = argv[2]
+@@ -66,10 +68,10 @@
+ chrome_file = os.path.join(path_to_chrome, g)
+ local_file = os.path.join(os.path.dirname(argv[0]), os.path.split(g)[1])
+ if argv[1] == 'push':
+- print 'Copying %s to %s' % (local_file, chrome_file)
++ print('Copying %s to %s' % (local_file, chrome_file))
+ shutil.copyfile(local_file, chrome_file)
+ elif argv[1] == 'pull':
+- print 'Copying %s to %s' % (chrome_file, local_file)
++ print('Copying %s to %s' % (chrome_file, local_file))
+ shutil.copyfile(chrome_file, local_file)
+ else:
+ assert False
+diff --git a/tools/gyp/tools/pretty_vcproj.py b/tools/pretty_vcproj.py
+index f02e59e..4454d9b 100755
+--- a/tools/gyp/tools/pretty_vcproj.py
++++ b/tools/gyp/tools/pretty_vcproj.py
+@@ -22,6 +22,13 @@
+ from xml.dom.minidom import parse
+ from xml.dom.minidom import Node
+
++try:
++ # cmp was removed in python3.
++ cmp
++except NameError:
++ def cmp(a, b):
++ return (a > b) - (a < b)
++
+ REPLACEMENTS = dict()
+ ARGUMENTS = None
+
+@@ -63,7 +70,7 @@
+ def PrettyPrintNode(node, indent=0):
+ if node.nodeType == Node.TEXT_NODE:
+ if node.data.strip():
+- print '%s%s' % (' '*indent, node.data.strip())
++ print('%s%s' % (' '*indent, node.data.strip()))
+ return
+
+ if node.childNodes:
+@@ -322,7 +329,6 @@
+
+ # Finally, we use the prett xml function to print the vcproj back to the
+ # user.
+- #print dom.toprettyxml(newl="\n")
+ PrettyPrintNode(dom.documentElement)
+ return 0
+
+--- node-v10.15.3/tools/gyp/pylib/gyp/input.py.old 2019-04-02 06:44:13.086310973 +0000
++++ node-v10.15.3/tools/gyp/pylib/gyp/input.py 2019-04-02 06:45:35.987250735 +0000
+@@ -900,6 +900,9 @@
+ (e, contents, build_file))
+
+ p_stdout, p_stderr = p.communicate('')
++ if getattr(p_stdout, 'decode'):
++ p_stdout = p_stdout.decode('utf-8')
++ p_stderr = p_stderr.decode('utf-8')
+
+ if p.wait() != 0 or p_stderr:
+ sys.stderr.write(p_stderr)
diff --git a/user/node/libatomic.patch b/user/node/libatomic.patch
new file mode 100644
index 000000000..fc2e4ffd2
--- /dev/null
+++ b/user/node/libatomic.patch
@@ -0,0 +1,14 @@
+--- node-v10.15.3/node.gyp.old 2019-03-05 15:16:32.000000000 +0000
++++ node-v10.15.3/node.gyp 2019-04-06 13:54:51.204939193 +0000
+@@ -479,6 +479,11 @@
+ 'msvs_disabled_warnings!': [4244],
+
+ 'conditions': [
++ [ 'host_arch=="mips" or host_arch=="mipsel" or host_arch=="ppc"', {
++ 'link_settings': {
++ 'libraries': [ '-latomic' ],
++ },
++ }],
+ [ 'node_code_cache_path!=""', {
+ 'sources': [ '<(node_code_cache_path)' ]
+ }, {
diff --git a/user/node/ppc32.patch b/user/node/ppc32.patch
new file mode 100644
index 000000000..80b97993c
--- /dev/null
+++ b/user/node/ppc32.patch
@@ -0,0 +1,18 @@
+--- node-v10.15.3/deps/v8/src/libsampler/sampler.cc.old 2019-03-05 15:16:28.000000000 +0000
++++ node-v10.15.3/deps/v8/src/libsampler/sampler.cc 2019-04-06 13:44:07.224653262 +0000
+@@ -502,9 +502,15 @@
+ reinterpret_cast<void*>(ucontext->uc_mcontext.regs->gpr[PT_R31]);
+ #else
+ // Some C libraries, notably Musl, define the regs member as a void pointer
++ #if !V8_TARGET_ARCH_32_BIT
+ state->pc = reinterpret_cast<void*>(ucontext->uc_mcontext.gp_regs[32]);
+ state->sp = reinterpret_cast<void*>(ucontext->uc_mcontext.gp_regs[1]);
+ state->fp = reinterpret_cast<void*>(ucontext->uc_mcontext.gp_regs[31]);
++ #else
++ state->pc = reinterpret_cast<void*>(ucontext->uc_mcontext.gregs[32]);
++ state->sp = reinterpret_cast<void*>(ucontext->uc_mcontext.gregs[1]);
++ state->fp = reinterpret_cast<void*>(ucontext->uc_mcontext.gregs[31]);
++ #endif
+ #endif
+ #elif V8_HOST_ARCH_S390
+ #if V8_TARGET_ARCH_32_BIT
diff --git a/user/node/ppc64.patch b/user/node/ppc64.patch
new file mode 100644
index 000000000..f76618562
--- /dev/null
+++ b/user/node/ppc64.patch
@@ -0,0 +1,40 @@
+--- a/deps/v8/src/ppc/assembler-ppc.h.old 2019-03-05 15:16:29.000000000 +0000
++++ b/deps/v8/src/ppc/assembler-ppc.h 2019-04-02 07:05:25.977213735 +0000
+@@ -48,7 +48,8 @@
+ #include "src/ppc/constants-ppc.h"
+
+ #if V8_HOST_ARCH_PPC && \
+- (V8_OS_AIX || (V8_TARGET_ARCH_PPC64 && V8_TARGET_BIG_ENDIAN))
++ (V8_OS_AIX || (V8_TARGET_ARCH_PPC64 && \
++ (V8_TARGET_BIG_ENDIAN && (!defined(_CALL_ELF) || _CALL_ELF == 1))))
+ #define ABI_USES_FUNCTION_DESCRIPTORS 1
+ #else
+ #define ABI_USES_FUNCTION_DESCRIPTORS 0
+@@ -60,13 +61,15 @@
+ #define ABI_PASSES_HANDLES_IN_REGS 0
+ #endif
+
+-#if !V8_HOST_ARCH_PPC || !V8_TARGET_ARCH_PPC64 || V8_TARGET_LITTLE_ENDIAN
++#if !V8_HOST_ARCH_PPC || !V8_TARGET_ARCH_PPC64 || \
++ (V8_TARGET_LITTLE_ENDIAN || (defined(_CALL_ELF) && _CALL_ELF == 2))
+ #define ABI_RETURNS_OBJECT_PAIRS_IN_REGS 1
+ #else
+ #define ABI_RETURNS_OBJECT_PAIRS_IN_REGS 0
+ #endif
+
+-#if !V8_HOST_ARCH_PPC || (V8_TARGET_ARCH_PPC64 && V8_TARGET_LITTLE_ENDIAN)
++#if !V8_HOST_ARCH_PPC || (V8_TARGET_ARCH_PPC64 && \
++ (V8_TARGET_LITTLE_ENDIAN || (defined(_CALL_ELF) && _CALL_ELF == 2)))
+ #define ABI_CALL_VIA_IP 1
+ #else
+ #define ABI_CALL_VIA_IP 0
+@@ -220,7 +223,8 @@
+ // The following constants describe the stack frame linkage area as
+ // defined by the ABI. Note that kNumRequiredStackFrameSlots must
+ // satisfy alignment requirements (rounding up if required).
+-#if V8_TARGET_ARCH_PPC64 && V8_TARGET_LITTLE_ENDIAN
++#if V8_TARGET_ARCH_PPC64 && (V8_TARGET_LITTLE_ENDIAN || \
++ defined(_CALL_ELF) && _CALL_ELF == 2)
+ // [0] back chain
+ // [1] condition register save area
+ // [2] link register save area
diff --git a/user/node/python3.patch b/user/node/python3.patch
new file mode 100644
index 000000000..dca32c7e4
--- /dev/null
+++ b/user/node/python3.patch
@@ -0,0 +1,163 @@
+--- node-v10.15.3/tools/configure.d/nodedownload.py (original)
++++ node-v10.15.3/tools/configure.d/nodedownload.py (refactored)
+@@ -1,7 +1,10 @@
+ #!/usr/bin/env python
+ # Moved some utilities here from ../../configure
+
++try:
+-import urllib
++ import urllib
++except ImportError:
++ import urllib.request, urllib.parse, urllib.error
+ import hashlib
+ import sys
+ import zipfile
+@@ -18,10 +21,10 @@
+ spin = ".:|'"
+ return (spin[c % len(spin)])
+
+-class ConfigOpener(urllib.FancyURLopener):
++class ConfigOpener(urllib.request.FancyURLopener):
+ """fancy opener used by retrievefile. Set a UA"""
+ # append to existing version (UA)
+- version = '%s node.js/configure' % urllib.URLopener.version
++ version = '%s node.js/configure' % urllib.request.URLopener.version
+
+ def reporthook(count, size, total):
+ """internal hook used by retrievefile"""
+@@ -36,10 +39,10 @@
+ sys.stdout.write(' <%s>\nConnecting...\r' % url)
+ sys.stdout.flush()
+ ConfigOpener().retrieve(url, targetfile, reporthook=reporthook)
+- print '' # clear the line
++ print('') # clear the line
+ return targetfile
+ except:
+- print ' ** Error occurred while downloading\n <%s>' % url
++ print(' ** Error occurred while downloading\n <%s>' % url)
+ raise
+
+ def md5sum(targetfile):
+@@ -56,12 +59,12 @@
+ """Unpacks packedfile into parent_path. Assumes .zip. Returns parent_path"""
+ if zipfile.is_zipfile(packedfile):
+ with contextlib.closing(zipfile.ZipFile(packedfile, 'r')) as icuzip:
+- print ' Extracting zipfile: %s' % packedfile
++ print(' Extracting zipfile: %s' % packedfile)
+ icuzip.extractall(parent_path)
+ return parent_path
+ elif tarfile.is_tarfile(packedfile):
+ with contextlib.closing(tarfile.TarFile.open(packedfile, 'r')) as icuzip:
+- print ' Extracting tarfile: %s' % packedfile
++ print(' Extracting tarfile: %s' % packedfile)
+ icuzip.extractall(parent_path)
+ return parent_path
+ else:
+@@ -112,16 +115,16 @@
+ theRet[anOpt] = True
+ else:
+ # future proof: ignore unknown types
+- print 'Warning: ignoring unknown --download= type "%s"' % anOpt
++ print('Warning: ignoring unknown --download= type "%s"' % anOpt)
+ # all done
+ return theRet
+
+ def candownload(auto_downloads, package):
+- if not (package in auto_downloads.keys()):
++ if not (package in list(auto_downloads.keys())):
+ raise Exception('Internal error: "%s" is not in the --downloads list. Check nodedownload.py' % package)
+ if auto_downloads[package]:
+ return True
+ else:
+- print """Warning: Not downloading package "%s". You could pass "--download=all"
+- (Windows: "download-all") to try auto-downloading it.""" % package
++ print("""Warning: Not downloading package "%s". You could pass "--download=all"
++ (Windows: "download-all") to try auto-downloading it.""" % package)
+ return False
+--- node-v10.15.3/tools/configure.d/nodedownload.py.old 2019-04-02 00:56:07.533200475 +0000
++++ node-v10.15.3/tools/configure.d/nodedownload.py 2019-04-02 00:58:09.019947842 +0000
+@@ -6,12 +6,11 @@
+ import sys
+ import zipfile
+ import tarfile
+-import fpformat
+ import contextlib
+
+ def formatSize(amt):
+ """Format a size as a string in MB"""
+- return fpformat.fix(amt / 1024000., 1)
++ return "%.1f" % (amt / 1024000.)
+
+ def spin(c):
+ """print out an ASCII 'spinner' based on the value of counter 'c'"""
+--- node-v10.15.3/configure.py.old 2019-03-05 15:16:24.000000000 +0000
++++ node-v10.15.3/configure.py 2019-04-02 01:09:04.948394534 +0000
+@@ -649,8 +649,8 @@
+ except OSError:
+ return (False, False, '', '')
+
+- proc.stdin.write('__clang__ __GNUC__ __GNUC_MINOR__ __GNUC_PATCHLEVEL__ '
+- '__clang_major__ __clang_minor__ __clang_patchlevel__')
++ proc.stdin.write(b'__clang__ __GNUC__ __GNUC_MINOR__ __GNUC_PATCHLEVEL__ '
++ b'__clang_major__ __clang_minor__ __clang_patchlevel__')
+
+ values = (proc.communicate()[0].split() + ['0'] * 7)[0:7]
+ is_clang = values[0] == '1'
+@@ -727,7 +727,7 @@
+ consider adjusting the CC environment variable if you installed
+ it in a non-standard prefix.''')
+
+- gas_ret = proc.communicate()[1]
++ gas_ret = proc.communicate()[1].decode('utf-8')
+ match = re.match(r"GNU assembler version ([2-9]\.[0-9]+)", gas_ret)
+
+ if match:
+@@ -794,7 +794,7 @@
+ consider adjusting the CC environment variable if you installed
+ it in a non-standard prefix.''')
+
+- p.stdin.write('\n')
++ p.stdin.write(b'\n')
+ out = p.communicate()[0]
+
+ out = str(out).split('\n')
+@@ -1351,7 +1351,7 @@
+ o['variables']['icu_small'] = b(True)
+ locs = set(options.with_icu_locales.split(','))
+ locs.add('root') # must have root
+- o['variables']['icu_locales'] = string.join(locs,',')
++ o['variables']['icu_locales'] = ','.join(locs)
+ # We will check a bit later if we can use the canned deps/icu-small
+ elif with_intl == 'full-icu':
+ # full ICU
+@@ -1655,7 +1655,7 @@
+ if options.prefix:
+ config['PREFIX'] = options.prefix
+
+-config = '\n'.join(map('='.join, config.iteritems())) + '\n'
++config = '\n'.join(map('='.join, config.items())) + '\n'
+
+ # On Windows there's no reason to search for a different python binary.
+ bin_override = None if sys.platform == 'win32' else make_bin_override()
+--- node-v10.15.3/configure.py.old 2019-04-02 01:12:29.786049396 +0000
++++ node-v10.15.3/configure.py 2019-04-02 01:21:08.499637208 +0000
+@@ -634,7 +634,7 @@
+ proc = subprocess.Popen(
+ shlex.split(pkg_config) + ['--silence-errors', flag, pkg],
+ stdout=subprocess.PIPE)
+- val = proc.communicate()[0].strip()
++ val = proc.communicate()[0].strip().decode('utf-8')
+ except OSError as e:
+ if e.errno != errno.ENOENT: raise e # Unexpected error.
+ return (None, None, None, None) # No pkg-config/pkgconf installed.
+--- node-v10.15.3/configure.py.old 2019-04-02 01:27:36.437454388 +0000
++++ node-v10.15.3/configure.py 2019-04-02 01:28:06.954136125 +0000
+@@ -795,7 +795,7 @@
+ it in a non-standard prefix.''')
+
+ p.stdin.write(b'\n')
+- out = p.communicate()[0]
++ out = p.communicate()[0].decode('utf-8')
+
+ out = str(out).split('\n')
+
diff --git a/user/node/stack-silliness.patch b/user/node/stack-silliness.patch
new file mode 100644
index 000000000..e8d775b73
--- /dev/null
+++ b/user/node/stack-silliness.patch
@@ -0,0 +1,14 @@
+--- node-v10.15.3/src/inspector_agent.cc.old 2019-03-05 15:16:32.000000000 +0000
++++ node-v10.15.3/src/inspector_agent.cc 2019-04-06 02:03:11.082016553 +0000
+@@ -104,9 +104,9 @@
+ // Don't shrink the thread's stack on FreeBSD. Said platform decided to
+ // follow the pthreads specification to the letter rather than in spirit:
+ // https://lists.freebsd.org/pipermail/freebsd-current/2014-March/048885.html
+-#ifndef __FreeBSD__
++#ifdef __GLIBC__
+ CHECK_EQ(0, pthread_attr_setstacksize(&attr, PTHREAD_STACK_MIN));
+-#endif // __FreeBSD__
++#endif // __GLIBC__
+ CHECK_EQ(0, pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED));
+ sigset_t sigmask;
+ // Mask all signals.
diff --git a/user/node/v8-python3.patch b/user/node/v8-python3.patch
new file mode 100644
index 000000000..64151be4f
--- /dev/null
+++ b/user/node/v8-python3.patch
@@ -0,0 +1,169 @@
+--- node-v10.15.3/deps/v8/tools/node/fetch_deps.py.old 2019-03-05 15:16:31.000000000 +0000
++++ node-v10.15.3/deps/v8/tools/node/fetch_deps.py 2019-04-02 01:01:00.421250178 +0000
+@@ -9,6 +9,9 @@
+ Usage: fetch_deps.py <v8-path>
+ """
+
++# for py2/py3 compatibility
++from __future__ import print_function
++
+ import os
+ import subprocess
+ import sys
+@@ -51,9 +55,9 @@
+ expected_git_dir = os.path.join(v8_path, ".git")
+ actual_git_dir = git("rev-parse --absolute-git-dir")
+ if expected_git_dir == actual_git_dir:
+- print "V8 is tracked stand-alone by git."
++ print("V8 is tracked stand-alone by git.")
+ return False
+- print "Initializing temporary git repository in v8."
++ print("Initializing temporary git repository in v8.")
+ git("init")
+ git("config user.name \"Ada Lovelace\"")
+ git("config user.email ada@lovela.ce")
+@@ -70,7 +74,7 @@
+
+ temporary_git = EnsureGit(v8_path)
+ try:
+- print "Fetching dependencies."
++ print("Fetching dependencies.")
+ env = os.environ.copy()
+ # gclient needs to have depot_tools in the PATH.
+ env["PATH"] = depot_tools + os.pathsep + env["PATH"]
+--- node-v10.15.3/deps/v8/tools/node/node_common.py.old 2019-03-05 15:16:31.000000000 +0000
++++ node-v10.15.3/deps/v8/tools/node/node_common.py 2019-04-02 01:00:45.522875398 +0000
+@@ -3,11 +3,15 @@
+ # Use of this source code is governed by a BSD-style license that can be
+ # found in the LICENSE file.
+
++# for py2/py3 compatibility
++from __future__ import print_function
++
+ import os
+ import pipes
+ import shutil
+ import stat
+ import subprocess
++import sys
+
+ DEPOT_TOOLS_URL = \
+ "https://chromium.googlesource.com/chromium/tools/depot_tools.git"
+@@ -22,23 +26,27 @@
+ except:
+ pass
+ if fetch_if_not_exist:
+- print "Checking out depot_tools."
++ print("Checking out depot_tools.")
+ # shell=True needed on Windows to resolve git.bat.
+ subprocess.check_call("git clone {} {}".format(
+ pipes.quote(DEPOT_TOOLS_URL),
+ pipes.quote(depot_tools)), shell=True)
++ # Using check_output to hide warning messages.
++ subprocess.check_output(
++ [sys.executable, gclient_path, "metrics", "--opt-out"],
++ cwd=depot_tools)
+ return depot_tools
+ return None
+ depot_tools = _Get(v8_path)
+ assert depot_tools is not None
+- print "Using depot tools in %s" % depot_tools
++ print("Using depot tools in %s" % depot_tools)
+ return depot_tools
+
+ def UninitGit(v8_path):
+- print "Uninitializing temporary git repository"
++ print("Uninitializing temporary git repository")
+ target = os.path.join(v8_path, ".git")
+ if os.path.isdir(target):
+- print ">> Cleaning up %s" % target
++ print(">> Cleaning up %s" % target)
+ def OnRmError(func, path, exec_info):
+ # This might happen on Windows
+ os.chmod(path, stat.S_IWRITE)
+--- node-v10.15.3/deps/v8/tools/node/update_node.py.old 2019-03-05 15:16:31.000000000 +0000
++++ node-v10.15.3/deps/v8/tools/node/update_node.py 2019-04-02 01:00:27.184875836 +0000
+@@ -23,6 +23,9 @@
+ --with-patch Also include currently staged files in the V8 checkout.
+ """
+
++# for py2/py3 compatibility
++from __future__ import print_function
++
+ import argparse
+ import os
+ import shutil
+@@ -61,9 +64,9 @@
+ # Node.js owns deps/v8/gypfiles in their downstream repository.
+ FILES_TO_KEEP = [ "gypfiles" ]
+
+ def RunGclient(path):
+ assert os.path.isdir(path)
+- print ">> Running gclient sync"
++ print(">> Running gclient sync")
+ subprocess.check_call(["gclient", "sync", "--nohooks"], cwd=path)
+
+ def CommitPatch(options):
+@@ -74,7 +77,7 @@
+ the fake git clone fetch it into node.js. We can leave the commit, as
+ bot_update will ensure a clean state on each run.
+ """
+- print ">> Committing patch"
++ print(">> Committing patch")
+ subprocess.check_call(
+ ["git", "-c", "user.name=fake", "-c", "user.email=fake@chromium.org",
+ "commit", "--allow-empty", "-m", "placeholder-commit"],
+@@ -84,8 +87,8 @@
+ def UpdateTarget(repository, options, files_to_keep):
+ source = os.path.join(options.v8_path, *repository)
+ target = os.path.join(options.node_path, TARGET_SUBDIR, *repository)
+- print ">> Updating target directory %s" % target
+- print ">> from active branch at %s" % source
++ print(">> Updating target directory %s" % target)
++ print(">> from active branch at %s" % source)
+ if not os.path.exists(target):
+ os.makedirs(target)
+ # Remove possible remnants of previous incomplete runs.
+@@ -98,10 +101,11 @@
+ git_args.append(["add"] + files_to_keep) # add and commit
+ git_args.append(["commit", "-m", "keep files"]) # files we want to keep
+
++ git_args.append(["clean", "-fxd"]) # nuke everything else
+ git_args.append(["remote", "add", "source", source]) # point to source repo
+ git_args.append(["fetch", "source", "HEAD"]) # sync to current branch
+ git_args.append(["checkout", "-f", "FETCH_HEAD"]) # switch to that branch
+- git_args.append(["clean", "-fd"]) # delete removed files
++ git_args.append(["clean", "-fxd"]) # delete removed files
+
+ if files_to_keep:
+ git_args.append(["cherry-pick", "master"]) # restore kept files
+@@ -117,17 +120,17 @@
+ def UpdateGitIgnore(options):
+ file_name = os.path.join(options.node_path, TARGET_SUBDIR, ".gitignore")
+ assert os.path.isfile(file_name)
+- print ">> Updating .gitignore with lines"
++ print(">> Updating .gitignore with lines")
+ with open(file_name) as gitignore:
+ content = gitignore.readlines()
+ content = [x.strip() for x in content]
+ for x in DELETE_FROM_GITIGNORE:
+ if x in content:
+- print "- %s" % x
++ print("- %s" % x)
+ content.remove(x)
+ for x in ADD_TO_GITIGNORE:
+ if x not in content:
+- print "+ %s" % x
++ print("+ %s" % x)
+ content.append(x)
+ content.sort(key=lambda x: x[1:] if x.startswith("!") else x)
+ with open(file_name, "w") as gitignore:
+@@ -135,7 +138,7 @@
+ gitignore.write("%s\n" % x)
+
+ def CreateCommit(options):
+- print ">> Creating commit."
++ print(">> Creating commit.")
+ # Find git hash from source.
+ githash = subprocess.check_output(["git", "rev-parse", "--short", "HEAD"],
+ cwd=options.v8_path).strip()
diff --git a/user/nsd/APKBUILD b/user/nsd/APKBUILD
index 388c902a1..f99b4514e 100644
--- a/user/nsd/APKBUILD
+++ b/user/nsd/APKBUILD
@@ -1,7 +1,7 @@
# Contributor: Luis Ressel <aranea@aixah.de>
# Maintainer: Luis Ressel <aranea@aixah.de>
pkgname=nsd
-pkgver=4.1.26
+pkgver=4.1.27
pkgrel=0
pkgdesc="An authoritative only name server"
url="https://www.nlnetlabs.nl/projects/nsd/about/"
@@ -50,6 +50,6 @@ openrc() {
install -Dm644 "$srcdir/nsd.confd" "$subpkgdir/etc/conf.d/nsd"
}
-sha512sums="49c07ad79e4e056bd519dad157a44d5cc55cd9a28723fd910159dc761d4ba56ef86cb7bf289dc5d1b363a49b999232bb7ec1afa737ca5c2659c4c9893e93494b nsd-4.1.26.tar.gz
+sha512sums="9c75041f5a6213cdba7238c2e51fc73031f6f073e06587659f93992fed49418ee789642b25b5522d48642507050ac15021f385927eed81ce5ea649f974e66402 nsd-4.1.27.tar.gz
f0ef1d3427e92650239d9d91402810c045fc9223e3f42ce86986422bf2039a0bcc02dffdfe1153d54de5c76c8f2bdc3e34fe341c65b41f2d333b02c00b5b0eae nsd.confd
139e52dec98792173f06d298574db0d0e6966a06af8a0a3069487beb01fd570c09d22322569b54bacdc43232dbfb99a8c497d4417d2bbfee88bcdd9d1b4d22f7 nsd.initd"
diff --git a/user/py3-pycairo/APKBUILD b/user/py3-pycairo/APKBUILD
index 87de6f166..9dce8a2ed 100644
--- a/user/py3-pycairo/APKBUILD
+++ b/user/py3-pycairo/APKBUILD
@@ -7,9 +7,12 @@ pkgdesc="Python bindings for Cairo"
url="https://pycairo.readthedocs.io/"
arch="all"
license="MIT AND (LGPL-2.1-only OR MPL-1.1)"
+depends=""
makedepends="python3-dev cairo-dev meson"
subpackages="$pkgname-dev"
-source="https://files.pythonhosted.org/packages/source/p/pycairo/pycairo-$pkgver.tar.gz"
+source="https://files.pythonhosted.org/packages/source/p/pycairo/pycairo-$pkgver.tar.gz
+ meson-idiocy.patch
+ "
builddir="$srcdir/pycairo-$pkgver"
build() {
@@ -33,4 +36,5 @@ package() {
DESTDIR="$pkgdir" ninja -C output install
}
-sha512sums="29af1eff8fc3ca942c6d7fcda8e5e004f01d1bfdab911f4ebb34213520922cf35c12d07bd18fe74a2c6d3c7f5a1aefd7fb5fe9b7cd310e6707b7451d5d5e18b0 pycairo-1.18.0.tar.gz"
+sha512sums="29af1eff8fc3ca942c6d7fcda8e5e004f01d1bfdab911f4ebb34213520922cf35c12d07bd18fe74a2c6d3c7f5a1aefd7fb5fe9b7cd310e6707b7451d5d5e18b0 pycairo-1.18.0.tar.gz
+33d400b1c27c549f0962d08e50ca4a6614e714896f5cca76128f8b4d3aac4c07b50bab03ee611de2e17d4c0001bbe3f4f02137e8988b040e9deea911607ad917 meson-idiocy.patch"
diff --git a/user/py3-pycairo/meson-idiocy.patch b/user/py3-pycairo/meson-idiocy.patch
new file mode 100644
index 000000000..99ed7d71c
--- /dev/null
+++ b/user/py3-pycairo/meson-idiocy.patch
@@ -0,0 +1,27 @@
+From 878932bf9c1545d659822961459a2601287b1675 Mon Sep 17 00:00:00 2001
+From: Christoph Reiter <reiter.christoph@gmail.com>
+Date: Sat, 23 Mar 2019 22:26:01 +0100
+Subject: [PATCH] meson: fix configure error with meson 0.50 re absolute paths.
+ Fixes #144
+
+The newest meson version has started to fail:
+meson.build:62:0: ERROR: Subdir keyword must not be an absolute path.
+
+Fix by using install_dir instead of subdir.
+---
+ cairo/meson.build | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/cairo/meson.build b/cairo/meson.build
+index 6a1bde3..3e3b2cc 100644
+--- a/cairo/meson.build
++++ b/cairo/meson.build
+@@ -61,7 +61,7 @@ header_file = configure_file(
+
+ install_headers(
+ [header_file],
+- subdir: join_paths(python.get_install_dir(), 'cairo', 'include'),
++ install_dir: join_paths(python.get_install_dir(), 'cairo', 'include'),
+ )
+ install_headers([header_file], subdir: 'pycairo')
+
diff --git a/user/rust/0001-Don-t-pass-CFLAGS-to-the-C-compiler.patch b/user/rust/0001-Don-t-pass-CFLAGS-to-the-C-compiler.patch
index f56ceb7e8..bb20cb7b6 100644
--- a/user/rust/0001-Don-t-pass-CFLAGS-to-the-C-compiler.patch
+++ b/user/rust/0001-Don-t-pass-CFLAGS-to-the-C-compiler.patch
@@ -1,17 +1,17 @@
-From 2a82e95d73d7e86bd420c15f712b09eb104a82c1 Mon Sep 17 00:00:00 2001
+From 6861c8305fc6449fdc019405d501e5740d44b818 Mon Sep 17 00:00:00 2001
From: Samuel Holland <samuel@sholland.org>
Date: Wed, 10 Jan 2018 13:36:41 -0600
-Subject: [PATCH 01/23] Don't pass CFLAGS to the C++ compiler
+Subject: [PATCH 01/14] Don't pass CFLAGS to the C++ compiler
---
src/bootstrap/builder.rs | 6 ++----
1 file changed, 2 insertions(+), 4 deletions(-)
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
-index 02e9ca9250..7be38b1e8b 100644
+index 9c58f5b179..bbb275c572 100644
--- a/src/bootstrap/builder.rs
+++ b/src/bootstrap/builder.rs
-@@ -1041,7 +1041,7 @@ impl<'a> Builder<'a> {
+@@ -1037,7 +1037,7 @@ impl<'a> Builder<'a> {
let cflags = self.cflags(target, GitRepo::Rustc).join(" ");
cargo
.env(format!("CFLAGS_{}", target), cflags.clone())
@@ -20,7 +20,7 @@ index 02e9ca9250..7be38b1e8b 100644
if let Some(ar) = self.ar(target) {
let ranlib = format!("{} s", ar.display());
-@@ -1056,9 +1056,7 @@ impl<'a> Builder<'a> {
+@@ -1052,9 +1052,7 @@ impl<'a> Builder<'a> {
let cxx = ccacheify(&cxx);
cargo
.env(format!("CXX_{}", target), &cxx)
@@ -32,5 +32,5 @@ index 02e9ca9250..7be38b1e8b 100644
}
--
-2.19.2
+2.21.0
diff --git a/user/rust/0002-Fix-LLVM-build.patch b/user/rust/0002-Fix-LLVM-build.patch
index 6938e0faf..7d12e84d3 100644
--- a/user/rust/0002-Fix-LLVM-build.patch
+++ b/user/rust/0002-Fix-LLVM-build.patch
@@ -1,26 +1,26 @@
-From d70cd5b6d304cc1b6d8f245460b27b8b0e0e6869 Mon Sep 17 00:00:00 2001
+From d44d2ca85013e85b2b72940609131d1229647461 Mon Sep 17 00:00:00 2001
From: Samuel Holland <samuel@sholland.org>
Date: Fri, 8 Sep 2017 00:04:29 -0500
-Subject: [PATCH 02/23] Fix LLVM build
+Subject: [PATCH 02/14] Fix LLVM build
---
src/bootstrap/lib.rs | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs
-index ab3d0b5137..30e84bc13f 100644
+index 7491385af7..74c04bac6b 100644
--- a/src/bootstrap/lib.rs
+++ b/src/bootstrap/lib.rs
-@@ -785,7 +785,8 @@ impl Build {
+@@ -772,7 +772,8 @@ impl Build {
// cc-rs because the build scripts will determine that for themselves.
let mut base = self.cc[&target].args().iter()
.map(|s| s.to_string_lossy().into_owned())
- .filter(|s| !s.starts_with("-O") && !s.starts_with("/O"))
+ .filter(|s| !s.starts_with("-O") && !s.starts_with("/O")
+ && !s.starts_with("-static"))
- .collect::<Vec<_>>();
+ .collect::<Vec<String>>();
// If we're compiling on macOS then we add a few unconditional flags
--
-2.19.2
+2.21.0
diff --git a/user/rust/0003-Allow-rustdoc-to-work-when-cross-compiling-on-musl.patch b/user/rust/0003-Allow-rustdoc-to-work-when-cross-compiling-on-musl.patch
index 588c4df66..c1cb5e930 100644
--- a/user/rust/0003-Allow-rustdoc-to-work-when-cross-compiling-on-musl.patch
+++ b/user/rust/0003-Allow-rustdoc-to-work-when-cross-compiling-on-musl.patch
@@ -1,7 +1,7 @@
-From 0613fed83ccf58ce3305a1df35e31eda25ba409e Mon Sep 17 00:00:00 2001
+From 520ceedb4a6384da5c207aa434a29d68fc8e238a Mon Sep 17 00:00:00 2001
From: Samuel Holland <samuel@sholland.org>
Date: Sat, 2 Dec 2017 17:25:44 -0600
-Subject: [PATCH 03/23] Allow rustdoc to work when cross-compiling on musl
+Subject: [PATCH 03/14] Allow rustdoc to work when cross-compiling on musl
musl can't handle foreign-architecture libraries in LD_LIBRARY_PATH.
---
@@ -9,10 +9,10 @@ musl can't handle foreign-architecture libraries in LD_LIBRARY_PATH.
1 file changed, 1 insertion(+), 4 deletions(-)
diff --git a/src/bootstrap/bin/rustdoc.rs b/src/bootstrap/bin/rustdoc.rs
-index bb5a21e3e4..1b6b78b90a 100644
+index dec74e60c7..af76331db0 100644
--- a/src/bootstrap/bin/rustdoc.rs
+++ b/src/bootstrap/bin/rustdoc.rs
-@@ -34,9 +34,6 @@ fn main() {
+@@ -24,9 +24,6 @@ fn main() {
Err(_) => 0,
};
@@ -22,7 +22,7 @@ index bb5a21e3e4..1b6b78b90a 100644
//FIXME(misdreavus): once stdsimd uses cfg(rustdoc) instead of cfg(dox), remove the `--cfg dox`
//arguments here
let mut cmd = Command::new(rustdoc);
-@@ -48,7 +45,7 @@ fn main() {
+@@ -38,7 +35,7 @@ fn main() {
.arg("--sysroot")
.arg(sysroot)
.env(bootstrap::util::dylib_path_var(),
@@ -32,5 +32,5 @@ index bb5a21e3e4..1b6b78b90a 100644
// Force all crates compiled by this compiler to (a) be unstable and (b)
// allow the `rustc_private` feature to link to other unstable crates
--
-2.19.2
+2.21.0
diff --git a/user/rust/0004-Require-static-native-libraries-when-linking-static-.patch b/user/rust/0004-Require-static-native-libraries-when-linking-static-.patch
index cd01e4153..6ec5eb76f 100644
--- a/user/rust/0004-Require-static-native-libraries-when-linking-static-.patch
+++ b/user/rust/0004-Require-static-native-libraries-when-linking-static-.patch
@@ -1,7 +1,7 @@
-From 4e6debd6806faf62ba737c01bb74080a637b127a Mon Sep 17 00:00:00 2001
+From 46e6ead56b2899d9041fe73c1530b7f02df2ca40 Mon Sep 17 00:00:00 2001
From: Samuel Holland <samuel@sholland.org>
Date: Fri, 8 Sep 2017 00:05:18 -0500
-Subject: [PATCH 04/23] Require static native libraries when linking static
+Subject: [PATCH 04/14] Require static native libraries when linking static
executables
gcc/ld will create a dynamically-linked executable without warning, even
@@ -15,10 +15,10 @@ Fixes #54243
1 file changed, 10 insertions(+), 3 deletions(-)
diff --git a/src/librustc_codegen_llvm/back/link.rs b/src/librustc_codegen_llvm/back/link.rs
-index 86c6a5e65b..0e3ff6da07 100644
+index fc744201a3..bae8b6ed75 100644
--- a/src/librustc_codegen_llvm/back/link.rs
+++ b/src/librustc_codegen_llvm/back/link.rs
-@@ -1588,8 +1588,8 @@ fn add_upstream_rust_crates(cmd: &mut dyn Linker,
+@@ -1413,8 +1413,8 @@ fn add_upstream_rust_crates(cmd: &mut dyn Linker,
}
}
@@ -29,7 +29,7 @@ index 86c6a5e65b..0e3ff6da07 100644
// dependencies. We've got two cases then:
//
// 1. The upstream crate is an rlib. In this case we *must* link in the
-@@ -1633,7 +1633,14 @@ fn add_upstream_native_libraries(cmd: &mut dyn Linker,
+@@ -1458,7 +1458,14 @@ fn add_upstream_native_libraries(cmd: &mut dyn Linker,
continue
}
match lib.kind {
@@ -46,5 +46,5 @@ index 86c6a5e65b..0e3ff6da07 100644
NativeLibraryKind::NativeStaticNobundle => {
// Link "static-nobundle" native libs only if the crate they originate from
--
-2.19.2
+2.21.0
diff --git a/user/rust/0005-Remove-nostdlib-and-musl_root-from-musl-targets.patch b/user/rust/0005-Remove-nostdlib-and-musl_root-from-musl-targets.patch
index 4c0a8dee0..6053de26b 100644
--- a/user/rust/0005-Remove-nostdlib-and-musl_root-from-musl-targets.patch
+++ b/user/rust/0005-Remove-nostdlib-and-musl_root-from-musl-targets.patch
@@ -1,28 +1,28 @@
-From c822b31f554ed2f930be8625973a401fd438c123 Mon Sep 17 00:00:00 2001
+From 5d98731bae319172c5fbfd8235f62bb00acb3a7d Mon Sep 17 00:00:00 2001
From: Samuel Holland <samuel@sholland.org>
Date: Fri, 8 Sep 2017 22:11:14 -0500
-Subject: [PATCH 05/23] Remove -nostdlib and musl_root from musl targets
+Subject: [PATCH 05/14] Remove -nostdlib and musl_root from musl targets
---
- config.toml.example | 6 ---
- src/bootstrap/bin/rustc.rs | 10 -----
- src/bootstrap/cc_detect.rs | 27 ++----------
- src/bootstrap/compile.rs | 31 --------------
- src/bootstrap/config.rs | 7 ----
- src/bootstrap/configure.py | 22 ----------
- src/bootstrap/lib.rs | 8 ----
- src/bootstrap/sanity.rs | 30 +-------------
- .../dist-i586-gnu-i586-i686-musl/Dockerfile | 2 -
- src/ci/docker/dist-various-1/Dockerfile | 7 ----
+ config.toml.example | 6 ----
+ src/bootstrap/bin/rustc.rs | 12 --------
+ src/bootstrap/cc_detect.rs | 27 ++----------------
+ src/bootstrap/compile.rs | 21 --------------
+ src/bootstrap/config.rs | 7 -----
+ src/bootstrap/configure.py | 22 ---------------
+ src/bootstrap/lib.rs | 8 ------
+ src/bootstrap/sanity.rs | 28 -------------------
+ .../dist-i586-gnu-i586-i686-musl/Dockerfile | 2 --
+ src/ci/docker/dist-various-1/Dockerfile | 7 -----
src/ci/docker/dist-x86_64-musl/Dockerfile | 1 -
- src/librustc_target/spec/linux_musl_base.rs | 41 -------------------
- 12 files changed, 4 insertions(+), 188 deletions(-)
+ src/librustc_target/spec/linux_musl_base.rs | 16 -----------
+ 12 files changed, 3 insertions(+), 154 deletions(-)
diff --git a/config.toml.example b/config.toml.example
-index e8cb0cba6b..1971fea758 100644
+index 23943d34b7..5eace54c6b 100644
--- a/config.toml.example
+++ b/config.toml.example
-@@ -452,12 +452,6 @@
+@@ -461,12 +461,6 @@
# only use static libraries. If unset, the target's default linkage is used.
#crt-static = false
@@ -36,10 +36,10 @@ index e8cb0cba6b..1971fea758 100644
# probably don't want to use this.
#qemu-rootfs = "..."
diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs
-index b6764c1aae..6fa60bd9a5 100644
+index a0c75cd9e9..f8daab9bd1 100644
--- a/src/bootstrap/bin/rustc.rs
+++ b/src/bootstrap/bin/rustc.rs
-@@ -30,7 +30,6 @@
+@@ -20,7 +20,6 @@
extern crate bootstrap;
use std::env;
@@ -47,27 +47,29 @@ index b6764c1aae..6fa60bd9a5 100644
use std::io;
use std::path::PathBuf;
use std::process::Command;
-@@ -126,15 +125,6 @@ fn main() {
+@@ -116,17 +115,6 @@ fn main() {
cmd.arg("-Cprefer-dynamic");
}
- // Help the libc crate compile by assisting it in finding the MUSL
- // native libraries.
- if let Some(s) = env::var_os("MUSL_ROOT") {
-- let mut root = OsString::from("native=");
-- root.push(&s);
-- root.push("/lib");
-- cmd.arg("-L").arg(&root);
+- if target.contains("musl") {
+- let mut root = OsString::from("native=");
+- root.push(&s);
+- root.push("/lib");
+- cmd.arg("-L").arg(&root);
+- }
- }
-
// Override linker if necessary.
if let Ok(target_linker) = env::var("RUSTC_TARGET_LINKER") {
cmd.arg(format!("-Clinker={}", target_linker));
diff --git a/src/bootstrap/cc_detect.rs b/src/bootstrap/cc_detect.rs
-index d5da0cabec..47cc31bded 100644
+index 37844759c7..fa553c0b06 100644
--- a/src/bootstrap/cc_detect.rs
+++ b/src/bootstrap/cc_detect.rs
-@@ -95,7 +95,7 @@ pub fn find(build: &mut Build) {
+@@ -85,7 +85,7 @@ pub fn find(build: &mut Build) {
if let Some(cc) = config.and_then(|c| c.cc.as_ref()) {
cfg.compiler(cc);
} else {
@@ -76,7 +78,7 @@ index d5da0cabec..47cc31bded 100644
}
let compiler = cfg.get_compiler();
-@@ -124,7 +124,7 @@ pub fn find(build: &mut Build) {
+@@ -114,7 +114,7 @@ pub fn find(build: &mut Build) {
if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
cfg.compiler(cxx);
} else {
@@ -85,7 +87,7 @@ index d5da0cabec..47cc31bded 100644
}
let compiler = cfg.get_compiler();
build.verbose(&format!("CXX_{} = {:?}", host, compiler.path()));
-@@ -135,8 +135,7 @@ pub fn find(build: &mut Build) {
+@@ -125,8 +125,7 @@ pub fn find(build: &mut Build) {
fn set_compiler(cfg: &mut cc::Build,
compiler: Language,
target: Interned<String>,
@@ -95,7 +97,7 @@ index d5da0cabec..47cc31bded 100644
match &*target {
// When compiling for android we may have the NDK configured in the
// config.toml in which case we look there. Otherwise the default
-@@ -173,26 +172,6 @@ fn set_compiler(cfg: &mut cc::Build,
+@@ -166,26 +165,6 @@ fn set_compiler(cfg: &mut cc::Build,
}
}
@@ -123,60 +125,36 @@ index d5da0cabec..47cc31bded 100644
}
}
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
-index 69d45acded..7bb38ac7ec 100644
+index b581271663..21421b2e38 100644
--- a/src/bootstrap/compile.rs
+++ b/src/bootstrap/compile.rs
-@@ -86,13 +86,6 @@ impl Step for Std {
- });
- builder.info(&format!("Uplifting stage1 std ({} -> {})", from.host, target));
-
-- // Even if we're not building std this stage, the new sysroot must
-- // still contain the musl startup objects.
-- if target.contains("musl") {
-- let libdir = builder.sysroot_libdir(compiler, target);
-- copy_musl_third_party_objects(builder, target, &libdir);
-- }
--
- builder.ensure(StdLink {
- compiler: from,
- target_compiler: compiler,
-@@ -101,11 +94,6 @@ impl Step for Std {
- return;
- }
+@@ -114,21 +114,6 @@ impl Step for Std {
+ fn copy_third_party_objects(builder: &Builder, compiler: &Compiler, target: Interned<String>) {
+ let libdir = builder.sysroot_libdir(*compiler, target);
-- if target.contains("musl") {
-- let libdir = builder.sysroot_libdir(compiler, target);
-- copy_musl_third_party_objects(builder, target, &libdir);
+- // Copies the crt(1,i,n).o startup objects
+- //
+- // Since musl supports fully static linking, we can cross link for it even
+- // with a glibc-targeting toolchain, given we have the appropriate startup
+- // files. As those shipped with glibc won't work, copy the ones provided by
+- // musl so we have them on linux-gnu hosts.
+- if target.contains("musl") {
+- for &obj in &["crt1.o", "crti.o", "crtn.o"] {
+- builder.copy(
+- &builder.musl_root(target).unwrap().join("lib").join(obj),
+- &libdir.join(obj),
+- );
- }
--
- let mut cargo = builder.cargo(compiler, Mode::Std, target, "build");
- std_cargo(builder, &compiler, target, &mut cargo);
-
-@@ -126,20 +114,6 @@ impl Step for Std {
- }
- }
-
--/// Copies the crt(1,i,n).o startup objects
--///
--/// Since musl supports fully static linking, we can cross link for it even
--/// with a glibc-targeting toolchain, given we have the appropriate startup
--/// files. As those shipped with glibc won't work, copy the ones provided by
--/// musl so we have them on linux-gnu hosts.
--fn copy_musl_third_party_objects(builder: &Builder,
-- target: Interned<String>,
-- into: &Path) {
-- for &obj in &["crt1.o", "crti.o", "crtn.o"] {
-- builder.copy(&builder.musl_root(target).unwrap().join("lib").join(obj), &into.join(obj));
- }
--}
-
- /// Configure cargo to compile the standard library, adding appropriate env vars
- /// and such.
- pub fn std_cargo(builder: &Builder,
-@@ -193,11 +167,6 @@ pub fn std_cargo(builder: &Builder,
- cargo.env("JEMALLOC_OVERRIDE", jemalloc);
- }
- }
+ // Copies libunwind.a compiled to be linked wit x86_64-fortanix-unknown-sgx.
+ //
+ // This target needs to be linked to Fortanix's port of llvm's libunwind.
+@@ -182,12 +167,6 @@ pub fn std_cargo(builder: &Builder,
+ cargo.arg("--features").arg(features)
+ .arg("--manifest-path")
+ .arg(builder.src.join("src/libstd/Cargo.toml"));
+-
- if target.contains("musl") {
- if let Some(p) = builder.musl_root(target) {
- cargo.env("MUSL_ROOT", p);
@@ -186,10 +164,10 @@ index 69d45acded..7bb38ac7ec 100644
}
diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs
-index a9d330e06a..745785a8ae 100644
+index 9421817ae6..cd70c1a1e4 100644
--- a/src/bootstrap/config.rs
+++ b/src/bootstrap/config.rs
-@@ -136,8 +136,6 @@ pub struct Config {
+@@ -128,8 +128,6 @@ pub struct Config {
pub print_step_timings: bool,
pub missing_tools: bool,
@@ -198,7 +176,7 @@ index a9d330e06a..745785a8ae 100644
pub prefix: Option<PathBuf>,
pub sysconfdir: Option<PathBuf>,
pub datadir: Option<PathBuf>,
-@@ -173,7 +171,6 @@ pub struct Target {
+@@ -164,7 +162,6 @@ pub struct Target {
pub linker: Option<PathBuf>,
pub ndk: Option<PathBuf>,
pub crt_static: Option<bool>,
@@ -206,7 +184,7 @@ index a9d330e06a..745785a8ae 100644
pub qemu_rootfs: Option<PathBuf>,
pub no_std: bool,
}
-@@ -305,7 +302,6 @@ struct Rust {
+@@ -296,7 +293,6 @@ struct Rust {
backtrace: Option<bool>,
default_linker: Option<String>,
channel: Option<String>,
@@ -214,7 +192,7 @@ index a9d330e06a..745785a8ae 100644
rpath: Option<bool>,
optimize_tests: Option<bool>,
debuginfo_tests: Option<bool>,
-@@ -343,7 +339,6 @@ struct TomlTarget {
+@@ -335,7 +331,6 @@ struct TomlTarget {
linker: Option<String>,
android_ndk: Option<String>,
crt_static: Option<bool>,
@@ -222,7 +200,7 @@ index a9d330e06a..745785a8ae 100644
qemu_rootfs: Option<String>,
}
-@@ -560,7 +555,6 @@ impl Config {
+@@ -549,7 +544,6 @@ impl Config {
set(&mut config.llvm_tools_enabled, rust.llvm_tools);
config.rustc_parallel_queries = rust.experimental_parallel_queries.unwrap_or(false);
config.rustc_default_linker = rust.default_linker.clone();
@@ -230,7 +208,7 @@ index a9d330e06a..745785a8ae 100644
config.save_toolstates = rust.save_toolstates.clone().map(PathBuf::from);
set(&mut config.deny_warnings, rust.deny_warnings.or(flags.warnings));
set(&mut config.backtrace_on_ice, rust.backtrace_on_ice);
-@@ -604,7 +598,6 @@ impl Config {
+@@ -592,7 +586,6 @@ impl Config {
target.ranlib = cfg.ranlib.clone().map(PathBuf::from);
target.linker = cfg.linker.clone().map(PathBuf::from);
target.crt_static = cfg.crt_static.clone();
@@ -239,10 +217,10 @@ index a9d330e06a..745785a8ae 100644
config.target_config.insert(INTERNER.intern_string(triple.clone()), target);
diff --git a/src/bootstrap/configure.py b/src/bootstrap/configure.py
-index 0cf84a6298..4845e93a0e 100755
+index b0c3c97024..5128897bf7 100755
--- a/src/bootstrap/configure.py
+++ b/src/bootstrap/configure.py
-@@ -112,28 +112,6 @@ v("aarch64-linux-android-ndk", "target.aarch64-linux-android.android-ndk",
+@@ -105,28 +105,6 @@ v("aarch64-linux-android-ndk", "target.aarch64-linux-android.android-ndk",
"aarch64-linux-android NDK standalone path")
v("x86_64-linux-android-ndk", "target.x86_64-linux-android.android-ndk",
"x86_64-linux-android NDK standalone path")
@@ -272,10 +250,10 @@ index 0cf84a6298..4845e93a0e 100755
"rootfs in qemu testing, you probably don't want to use this")
v("qemu-aarch64-rootfs", "target.aarch64-unknown-linux-gnu.qemu-rootfs",
diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs
-index 30e84bc13f..cf6f410ccb 100644
+index 74c04bac6b..aa91600ea7 100644
--- a/src/bootstrap/lib.rs
+++ b/src/bootstrap/lib.rs
-@@ -861,14 +861,6 @@ impl Build {
+@@ -849,14 +849,6 @@ impl Build {
}
}
@@ -291,19 +269,10 @@ index 30e84bc13f..cf6f410ccb 100644
fn no_std(&self, target: Interned<String>) -> Option<bool> {
self.config.target_config.get(&target)
diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs
-index 724cb5841f..5af34e441c 100644
+index fe547a6b15..a452c874c4 100644
--- a/src/bootstrap/sanity.rs
+++ b/src/bootstrap/sanity.rs
-@@ -21,7 +21,7 @@
- use std::collections::HashMap;
- use std::env;
- use std::ffi::{OsString, OsStr};
--use std::fs::{self, File};
-+use std::fs::File;
- use std::io::Read;
- use std::path::PathBuf;
- use std::process::Command;
-@@ -186,34 +186,6 @@ pub fn check(build: &mut Build) {
+@@ -169,34 +169,6 @@ pub fn check(build: &mut Build) {
}
}
@@ -352,12 +321,12 @@ index ba2d32a929..412c37fdd1 100644
--disable-docs
diff --git a/src/ci/docker/dist-various-1/Dockerfile b/src/ci/docker/dist-various-1/Dockerfile
-index e2484b7224..1855b7f3e5 100644
+index ab2dd5a399..a9569fe6d4 100644
--- a/src/ci/docker/dist-various-1/Dockerfile
+++ b/src/ci/docker/dist-various-1/Dockerfile
-@@ -116,13 +116,6 @@ ENV CC_mipsel_unknown_linux_musl=mipsel-openwrt-linux-gcc \
- CC_armebv7r_none_eabi=arm-none-eabi-gcc
-
+@@ -128,13 +128,6 @@ ENV CC_mipsel_unknown_linux_musl=mipsel-openwrt-linux-gcc \
+ CXX_thumbv7neon_unknown_linux_gnueabihf=arm-linux-gnueabihf-g++
+
ENV RUST_CONFIGURE_ARGS \
- --musl-root-armv5te=/musl-armv5te \
- --musl-root-arm=/musl-arm \
@@ -382,10 +351,10 @@ index 06f8a2fbba..f5dd379528 100644
--disable-docs
diff --git a/src/librustc_target/spec/linux_musl_base.rs b/src/librustc_target/spec/linux_musl_base.rs
-index 7a3f3c2a51..32fe2f880a 100644
+index 1bc90d1a73..e26a5240a6 100644
--- a/src/librustc_target/spec/linux_musl_base.rs
+++ b/src/librustc_target/spec/linux_musl_base.rs
-@@ -13,53 +13,12 @@ use spec::{LinkerFlavor, TargetOptions};
+@@ -3,28 +3,12 @@ use spec::{LinkerFlavor, TargetOptions};
pub fn opts() -> TargetOptions {
let mut base = super::linux_base::opts();
@@ -400,31 +369,6 @@ index 7a3f3c2a51..32fe2f880a 100644
// argument is *not* necessary for normal builds, but it can't hurt!
base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-Wl,--eh-frame-hdr".to_string());
-- // There's a whole bunch of circular dependencies when dealing with MUSL
-- // unfortunately. To put this in perspective libc is statically linked to
-- // liblibc and libunwind is statically linked to libstd:
-- //
-- // * libcore depends on `fmod` which is in libc (transitively in liblibc).
-- // liblibc, however, depends on libcore.
-- // * compiler-rt has personality symbols that depend on libunwind, but
-- // libunwind is in libstd which depends on compiler-rt.
-- //
-- // Recall that linkers discard libraries and object files as much as
-- // possible, and with all the static linking and archives flying around with
-- // MUSL the linker is super aggressively stripping out objects. For example
-- // the first case has fmod stripped from liblibc (it's in its own object
-- // file) so it's not there when libcore needs it. In the second example all
-- // the unused symbols from libunwind are stripped (each is in its own object
-- // file in libstd) before we end up linking compiler-rt which depends on
-- // those symbols.
-- //
-- // To deal with these circular dependencies we just force the compiler to
-- // link everything as a group, not stripping anything out until everything
-- // is processed. The linker will still perform a pass to strip out object
-- // files but it won't do so until all objects/archives have been processed.
-- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-Wl,-(".to_string());
-- base.post_link_args.insert(LinkerFlavor::Gcc, vec!["-Wl,-)".to_string()]);
--
- // When generating a statically linked executable there's generally some
- // small setup needed which is listed in these files. These are provided by
- // a musl toolchain and are linked by default by the `musl-gcc` script. Note
@@ -440,5 +384,5 @@ index 7a3f3c2a51..32fe2f880a 100644
base.crt_static_default = true;
// These targets allow the user to choose between static and dynamic linking.
--
-2.19.2
+2.21.0
diff --git a/user/rust/0006-Prefer-libgcc_eh-over-libunwind-for-musl.patch b/user/rust/0006-Prefer-libgcc_eh-over-libunwind-for-musl.patch
index e3a4b47cf..f7f7ccd3e 100644
--- a/user/rust/0006-Prefer-libgcc_eh-over-libunwind-for-musl.patch
+++ b/user/rust/0006-Prefer-libgcc_eh-over-libunwind-for-musl.patch
@@ -1,17 +1,17 @@
-From 916ce9740ddd2e41a567f821f9c0e86faa3c8269 Mon Sep 17 00:00:00 2001
+From eb2340bd8c8d9e79eb4bb231ccb31cd4e4bba907 Mon Sep 17 00:00:00 2001
From: Samuel Holland <samuel@sholland.org>
Date: Sat, 9 Sep 2017 00:14:16 -0500
-Subject: [PATCH 06/23] Prefer libgcc_eh over libunwind for musl
+Subject: [PATCH 06/14] Prefer libgcc_eh over libunwind for musl
---
src/libunwind/lib.rs | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/libunwind/lib.rs b/src/libunwind/lib.rs
-index 954eb9d6d0..a127aa5d96 100644
+index 7ed7837268..2f38a43845 100644
--- a/src/libunwind/lib.rs
+++ b/src/libunwind/lib.rs
-@@ -36,6 +36,6 @@ cfg_if! {
+@@ -26,6 +26,6 @@ cfg_if! {
}
#[cfg(target_env = "musl")]
@@ -20,5 +20,5 @@ index 954eb9d6d0..a127aa5d96 100644
#[link(name = "gcc_s", cfg(not(target_feature = "crt-static")))]
extern {}
--
-2.19.2
+2.21.0
diff --git a/user/rust/0007-Add-powerpc-unknown-linux-musl-target.patch b/user/rust/0007-Add-powerpc-unknown-linux-musl-target.patch
deleted file mode 100644
index f186af55f..000000000
--- a/user/rust/0007-Add-powerpc-unknown-linux-musl-target.patch
+++ /dev/null
@@ -1,66 +0,0 @@
-From a69d4e607d3eda390fd5c6e97022d4ef20af5676 Mon Sep 17 00:00:00 2001
-From: Samuel Holland <samuel@sholland.org>
-Date: Mon, 10 Sep 2018 01:35:35 +0000
-Subject: [PATCH 07/23] Add powerpc-unknown-linux-musl target
-
----
- src/librustc_target/spec/mod.rs | 1 +
- .../spec/powerpc_unknown_linux_musl.rs | 34 +++++++++++++++++++
- 2 files changed, 35 insertions(+)
- create mode 100644 src/librustc_target/spec/powerpc_unknown_linux_musl.rs
-
-diff --git a/src/librustc_target/spec/mod.rs b/src/librustc_target/spec/mod.rs
-index d43d45f64a..4b03f6141f 100644
---- a/src/librustc_target/spec/mod.rs
-+++ b/src/librustc_target/spec/mod.rs
-@@ -297,6 +297,7 @@ supported_targets! {
- ("mipsel-unknown-linux-gnu", mipsel_unknown_linux_gnu),
- ("powerpc-unknown-linux-gnu", powerpc_unknown_linux_gnu),
- ("powerpc-unknown-linux-gnuspe", powerpc_unknown_linux_gnuspe),
-+ ("powerpc-unknown-linux-musl", powerpc_unknown_linux_musl),
- ("powerpc64-unknown-linux-gnu", powerpc64_unknown_linux_gnu),
- ("powerpc64le-unknown-linux-gnu", powerpc64le_unknown_linux_gnu),
- ("powerpc64le-unknown-linux-musl", powerpc64le_unknown_linux_musl),
-diff --git a/src/librustc_target/spec/powerpc_unknown_linux_musl.rs b/src/librustc_target/spec/powerpc_unknown_linux_musl.rs
-new file mode 100644
-index 0000000000..48340da514
---- /dev/null
-+++ b/src/librustc_target/spec/powerpc_unknown_linux_musl.rs
-@@ -0,0 +1,34 @@
-+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
-+// file at the top-level directory of this distribution and at
-+// http://rust-lang.org/COPYRIGHT.
-+//
-+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-+// option. This file may not be copied, modified, or distributed
-+// except according to those terms.
-+
-+use spec::{LinkerFlavor, Target, TargetResult};
-+
-+pub fn target() -> TargetResult {
-+ let mut base = super::linux_musl_base::opts();
-+ base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m32".to_string());
-+ base.max_atomic_width = Some(32);
-+
-+ // see #36994
-+ base.exe_allocation_crate = None;
-+
-+ Ok(Target {
-+ llvm_target: "powerpc-unknown-linux-musl".to_string(),
-+ target_endian: "big".to_string(),
-+ target_pointer_width: "32".to_string(),
-+ target_c_int_width: "32".to_string(),
-+ data_layout: "E-m:e-p:32:32-i64:64-n32".to_string(),
-+ arch: "powerpc".to_string(),
-+ target_os: "linux".to_string(),
-+ target_env: "musl".to_string(),
-+ target_vendor: "unknown".to_string(),
-+ linker_flavor: LinkerFlavor::Gcc,
-+ options: base,
-+ })
-+}
---
-2.19.2
-
diff --git a/user/rust/0012-runtest-Fix-proc-macro-tests-on-musl-hosts.patch b/user/rust/0007-runtest-Fix-proc-macro-tests-on-musl-hosts.patch
index c8b2529d8..6435eeef0 100644
--- a/user/rust/0012-runtest-Fix-proc-macro-tests-on-musl-hosts.patch
+++ b/user/rust/0007-runtest-Fix-proc-macro-tests-on-musl-hosts.patch
@@ -1,17 +1,17 @@
-From 050dfd0e91b92cb350ca52599266cefc6c765106 Mon Sep 17 00:00:00 2001
+From e60db771c8ffe7b82a97ca516737cbf40b372e54 Mon Sep 17 00:00:00 2001
From: Samuel Holland <samuel@sholland.org>
Date: Sun, 16 Sep 2018 16:40:04 +0000
-Subject: [PATCH 12/23] runtest: Fix proc-macro tests on musl hosts
+Subject: [PATCH 07/14] runtest: Fix proc-macro tests on musl hosts
---
src/tools/compiletest/src/runtest.rs | 7 ++-----
1 file changed, 2 insertions(+), 5 deletions(-)
diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs
-index a80bbd401a..bbe0a6fb7a 100644
+index 400c205d44..be5c50b070 100644
--- a/src/tools/compiletest/src/runtest.rs
+++ b/src/tools/compiletest/src/runtest.rs
-@@ -1582,7 +1582,6 @@ impl<'test> TestCx<'test> {
+@@ -1566,7 +1566,6 @@ impl<'test> TestCx<'test> {
None
} else if self.config.target.contains("cloudabi")
|| self.config.target.contains("emscripten")
@@ -19,7 +19,7 @@ index a80bbd401a..bbe0a6fb7a 100644
|| self.config.target.contains("wasm32")
{
// We primarily compile all auxiliary libraries as dynamic libraries
-@@ -1590,10 +1589,8 @@ impl<'test> TestCx<'test> {
+@@ -1574,10 +1573,8 @@ impl<'test> TestCx<'test> {
// for the test suite (otherwise including libstd statically in all
// executables takes up quite a bit of space).
//
@@ -33,5 +33,5 @@ index a80bbd401a..bbe0a6fb7a 100644
} else {
Some("dylib")
--
-2.19.2
+2.21.0
diff --git a/user/rust/0008-Fix-powerpc64-ELFv2-big-endian-struct-passing-ABI.patch b/user/rust/0008-Fix-powerpc64-ELFv2-big-endian-struct-passing-ABI.patch
deleted file mode 100644
index 1c4c813fc..000000000
--- a/user/rust/0008-Fix-powerpc64-ELFv2-big-endian-struct-passing-ABI.patch
+++ /dev/null
@@ -1,79 +0,0 @@
-From 8a2c9af2ea8b21d38f5ff779fde29f612bb63575 Mon Sep 17 00:00:00 2001
-From: Samuel Holland <samuel@sholland.org>
-Date: Sun, 16 Sep 2018 16:34:15 +0000
-Subject: [PATCH 08/23] Fix powerpc64 ELFv2 big-endian struct-passing ABI
-
-The requirements here are not "ELFv1" requirements, but big-endian
-requirements, as the extension or non-extension of the argument is
-necessary to put the argument in the correct half of the register.
-Parameter passing in the ELFv2 ABI needs these same transformations.
-Since this code makes no difference on little-endian machines, simplify
-it to use the same code path everywhere.
----
- src/librustc_target/abi/call/powerpc64.rs | 29 ++++++++++-------------
- src/librustc_target/abi/mod.rs | 2 +-
- 2 files changed, 13 insertions(+), 18 deletions(-)
-
-diff --git a/src/librustc_target/abi/call/powerpc64.rs b/src/librustc_target/abi/call/powerpc64.rs
-index 0c5ec77a39..934d2b1138 100644
---- a/src/librustc_target/abi/call/powerpc64.rs
-+++ b/src/librustc_target/abi/call/powerpc64.rs
-@@ -75,7 +75,9 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>, abi: ABI)
- let size = ret.layout.size;
- let bits = size.bits();
- if bits <= 128 {
-- let unit = if bits <= 8 {
-+ let unit = if cx.data_layout().endian == Endian::Big {
-+ Reg { kind: RegKind::Integer, size }
-+ } else if bits <= 8 {
- Reg::i8()
- } else if bits <= 16 {
- Reg::i16()
-@@ -110,22 +112,15 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>, abi: ABI)
- }
-
- let size = arg.layout.size;
-- let (unit, total) = match abi {
-- ELFv1 => {
-- // In ELFv1, aggregates smaller than a doubleword should appear in
-- // the least-significant bits of the parameter doubleword. The rest
-- // should be padded at their tail to fill out multiple doublewords.
-- if size.bits() <= 64 {
-- (Reg { kind: RegKind::Integer, size }, size)
-- } else {
-- let align = Align::from_bits(64, 64).unwrap();
-- (Reg::i64(), size.abi_align(align))
-- }
-- },
-- ELFv2 => {
-- // In ELFv2, we can just cast directly.
-- (Reg::i64(), size)
-- },
-+ let (unit, total) = if size.bits() <= 64 {
-+ // Aggregates smaller than a doubleword should appear in
-+ // the least-significant bits of the parameter doubleword.
-+ (Reg { kind: RegKind::Integer, size }, size)
-+ } else {
-+ // Aggregates larger than a doubleword should be padded
-+ // at the tail to fill out a whole number of doublewords.
-+ let align = Align::from_bits(64, 64).unwrap();
-+ (Reg::i64(), size.abi_align(align))
- };
-
- arg.cast_to(Uniform {
-diff --git a/src/librustc_target/abi/mod.rs b/src/librustc_target/abi/mod.rs
-index 1a5d2801af..0ec2cc0d7b 100644
---- a/src/librustc_target/abi/mod.rs
-+++ b/src/librustc_target/abi/mod.rs
-@@ -214,7 +214,7 @@ impl<'a> HasDataLayout for &'a TargetDataLayout {
- }
-
- /// Endianness of the target, which must match cfg(target-endian).
--#[derive(Copy, Clone)]
-+#[derive(Copy, Clone, PartialEq)]
- pub enum Endian {
- Little,
- Big
---
-2.19.2
-
diff --git a/user/rust/0008-test-enum-debug-Correct-minimum-LLVM-version.patch b/user/rust/0008-test-enum-debug-Correct-minimum-LLVM-version.patch
new file mode 100644
index 000000000..dddaf75bf
--- /dev/null
+++ b/user/rust/0008-test-enum-debug-Correct-minimum-LLVM-version.patch
@@ -0,0 +1,67 @@
+From 8dbda1b85a15b04300f73eb30c1e2da196750cfb Mon Sep 17 00:00:00 2001
+From: Samuel Holland <samuel@sholland.org>
+Date: Fri, 22 Mar 2019 17:08:06 -0500
+Subject: [PATCH 08/14] test/enum-debug*: Correct minimum LLVM version
+
+---
+ src/test/codegen/enum-debug-clike.rs | 2 +-
+ src/test/codegen/enum-debug-niche-2.rs | 2 +-
+ src/test/codegen/enum-debug-niche.rs | 2 +-
+ src/test/codegen/enum-debug-tagged.rs | 2 +-
+ 4 files changed, 4 insertions(+), 4 deletions(-)
+
+diff --git a/src/test/codegen/enum-debug-clike.rs b/src/test/codegen/enum-debug-clike.rs
+index 98f07505f7..11e2fae596 100644
+--- a/src/test/codegen/enum-debug-clike.rs
++++ b/src/test/codegen/enum-debug-clike.rs
+@@ -4,7 +4,7 @@
+
+ // ignore-tidy-linelength
+ // ignore-windows
+-// min-system-llvm-version 7.0
++// min-system-llvm-version 8.0
+
+ // compile-flags: -g -C no-prepopulate-passes
+
+diff --git a/src/test/codegen/enum-debug-niche-2.rs b/src/test/codegen/enum-debug-niche-2.rs
+index 369fa1fcfd..aa6cbf66b4 100644
+--- a/src/test/codegen/enum-debug-niche-2.rs
++++ b/src/test/codegen/enum-debug-niche-2.rs
+@@ -4,7 +4,7 @@
+
+ // ignore-tidy-linelength
+ // ignore-windows
+-// min-system-llvm-version 7.0
++// min-system-llvm-version 8.0
+
+ // compile-flags: -g -C no-prepopulate-passes
+
+diff --git a/src/test/codegen/enum-debug-niche.rs b/src/test/codegen/enum-debug-niche.rs
+index 1273a785ba..93eebde7b8 100644
+--- a/src/test/codegen/enum-debug-niche.rs
++++ b/src/test/codegen/enum-debug-niche.rs
+@@ -4,7 +4,7 @@
+
+ // ignore-tidy-linelength
+ // ignore-windows
+-// min-system-llvm-version 7.0
++// min-system-llvm-version 8.0
+
+ // compile-flags: -g -C no-prepopulate-passes
+
+diff --git a/src/test/codegen/enum-debug-tagged.rs b/src/test/codegen/enum-debug-tagged.rs
+index 5c37e40c27..84976ce97a 100644
+--- a/src/test/codegen/enum-debug-tagged.rs
++++ b/src/test/codegen/enum-debug-tagged.rs
+@@ -4,7 +4,7 @@
+
+ // ignore-tidy-linelength
+ // ignore-windows
+-// min-system-llvm-version 7.0
++// min-system-llvm-version 8.0
+
+ // compile-flags: -g -C no-prepopulate-passes
+
+--
+2.21.0
+
diff --git a/user/rust/0009-Use-the-ELFv2-ABI-on-powerpc64-musl.patch b/user/rust/0009-Use-the-ELFv2-ABI-on-powerpc64-musl.patch
deleted file mode 100644
index c6da310da..000000000
--- a/user/rust/0009-Use-the-ELFv2-ABI-on-powerpc64-musl.patch
+++ /dev/null
@@ -1,50 +0,0 @@
-From 44c55a243178d34d5a21e08abc38d103e6ddb0c8 Mon Sep 17 00:00:00 2001
-From: Samuel Holland <samuel@sholland.org>
-Date: Wed, 8 Aug 2018 22:06:18 -0500
-Subject: [PATCH 09/23] Use the ELFv2 ABI on powerpc64 musl
-
----
- src/librustc_target/abi/call/powerpc64.rs | 15 ++++++++++-----
- 1 file changed, 10 insertions(+), 5 deletions(-)
-
-diff --git a/src/librustc_target/abi/call/powerpc64.rs b/src/librustc_target/abi/call/powerpc64.rs
-index 934d2b1138..18d14495e2 100644
---- a/src/librustc_target/abi/call/powerpc64.rs
-+++ b/src/librustc_target/abi/call/powerpc64.rs
-@@ -14,11 +14,12 @@
-
- use abi::call::{FnType, ArgType, Reg, RegKind, Uniform};
- use abi::{Align, Endian, HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods};
-+use spec::HasTargetSpec;
-
- #[derive(Debug, Clone, Copy, PartialEq)]
- enum ABI {
- ELFv1, // original ABI used for powerpc64 (big-endian)
-- ELFv2, // newer ABI used for powerpc64le
-+ ELFv2, // newer ABI used for powerpc64le and musl (both endians)
- }
- use self::ABI::*;
-
-@@ -131,11 +132,15 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>, abi: ABI)
-
- pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>)
- where Ty: TyLayoutMethods<'a, C> + Copy,
-- C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
-+ C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout + HasTargetSpec
- {
-- let abi = match cx.data_layout().endian {
-- Endian::Big => ELFv1,
-- Endian::Little => ELFv2,
-+ let abi = if cx.target_spec().target_env == "musl" {
-+ ELFv2
-+ } else {
-+ match cx.data_layout().endian {
-+ Endian::Big => ELFv1,
-+ Endian::Little => ELFv2
-+ }
- };
-
- if !fty.ret.is_ignore() {
---
-2.19.2
-
diff --git a/user/rust/0018-test-use-extern-for-plugins-Don-t-assume-multilib.patch b/user/rust/0009-test-use-extern-for-plugins-Don-t-assume-multilib.patch
index 7d63cc1cc..da9d25537 100644
--- a/user/rust/0018-test-use-extern-for-plugins-Don-t-assume-multilib.patch
+++ b/user/rust/0009-test-use-extern-for-plugins-Don-t-assume-multilib.patch
@@ -1,7 +1,7 @@
-From 8fbb4ae754fff3f4daa82ad222b3743aace0aa97 Mon Sep 17 00:00:00 2001
+From b7fdd2b4b192e1ac7e59fe009e7b7bba8943c8dc Mon Sep 17 00:00:00 2001
From: Samuel Holland <samuel@sholland.org>
Date: Sat, 6 Oct 2018 04:01:48 +0000
-Subject: [PATCH 18/23] test/use-extern-for-plugins: Don't assume multilib
+Subject: [PATCH 09/14] test/use-extern-for-plugins: Don't assume multilib
---
src/test/run-make-fulldeps/use-extern-for-plugins/Makefile | 7 +------
@@ -26,5 +26,5 @@ index cc7bc176f4..36553f1e44 100644
all:
$(RUSTC) foo.rs -C extra-filename=-host
--
-2.19.2
+2.21.0
diff --git a/user/rust/0010-Add-powerpc64-unknown-linux-musl-target.patch b/user/rust/0010-Add-powerpc64-unknown-linux-musl-target.patch
deleted file mode 100644
index a575b1345..000000000
--- a/user/rust/0010-Add-powerpc64-unknown-linux-musl-target.patch
+++ /dev/null
@@ -1,67 +0,0 @@
-From c0d3804aa53dee33c03ebc8b64bd6f7a7b54044c Mon Sep 17 00:00:00 2001
-From: Samuel Holland <samuel@sholland.org>
-Date: Tue, 7 Aug 2018 21:59:15 -0500
-Subject: [PATCH 10/23] Add powerpc64-unknown-linux-musl target
-
----
- src/librustc_target/spec/mod.rs | 1 +
- .../spec/powerpc64_unknown_linux_musl.rs | 35 +++++++++++++++++++
- 2 files changed, 36 insertions(+)
- create mode 100644 src/librustc_target/spec/powerpc64_unknown_linux_musl.rs
-
-diff --git a/src/librustc_target/spec/mod.rs b/src/librustc_target/spec/mod.rs
-index 4b03f6141f..f75c4940c6 100644
---- a/src/librustc_target/spec/mod.rs
-+++ b/src/librustc_target/spec/mod.rs
-@@ -299,6 +299,7 @@ supported_targets! {
- ("powerpc-unknown-linux-gnuspe", powerpc_unknown_linux_gnuspe),
- ("powerpc-unknown-linux-musl", powerpc_unknown_linux_musl),
- ("powerpc64-unknown-linux-gnu", powerpc64_unknown_linux_gnu),
-+ ("powerpc64-unknown-linux-musl", powerpc64_unknown_linux_musl),
- ("powerpc64le-unknown-linux-gnu", powerpc64le_unknown_linux_gnu),
- ("powerpc64le-unknown-linux-musl", powerpc64le_unknown_linux_musl),
- ("s390x-unknown-linux-gnu", s390x_unknown_linux_gnu),
-diff --git a/src/librustc_target/spec/powerpc64_unknown_linux_musl.rs b/src/librustc_target/spec/powerpc64_unknown_linux_musl.rs
-new file mode 100644
-index 0000000000..24ff9e0ecd
---- /dev/null
-+++ b/src/librustc_target/spec/powerpc64_unknown_linux_musl.rs
-@@ -0,0 +1,35 @@
-+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
-+// file at the top-level directory of this distribution and at
-+// http://rust-lang.org/COPYRIGHT.
-+//
-+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-+// option. This file may not be copied, modified, or distributed
-+// except according to those terms.
-+
-+use spec::{LinkerFlavor, Target, TargetResult};
-+
-+pub fn target() -> TargetResult {
-+ let mut base = super::linux_musl_base::opts();
-+ base.cpu = "ppc64".to_string();
-+ base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
-+ base.max_atomic_width = Some(64);
-+
-+ // see #36994
-+ base.exe_allocation_crate = None;
-+
-+ Ok(Target {
-+ llvm_target: "powerpc64-unknown-linux-musl".to_string(),
-+ target_endian: "big".to_string(),
-+ target_pointer_width: "64".to_string(),
-+ target_c_int_width: "32".to_string(),
-+ data_layout: "E-m:e-i64:64-n32:64".to_string(),
-+ arch: "powerpc64".to_string(),
-+ target_os: "linux".to_string(),
-+ target_env: "musl".to_string(),
-+ target_vendor: "unknown".to_string(),
-+ linker_flavor: LinkerFlavor::Gcc,
-+ options: base,
-+ })
-+}
---
-2.19.2
-
diff --git a/user/rust/0019-test-sysroot-crates-are-unstable-Fix-test-when-rpath.patch b/user/rust/0010-test-sysroot-crates-are-unstable-Fix-test-when-rpath.patch
index 4f4b42a9c..ff49416e0 100644
--- a/user/rust/0019-test-sysroot-crates-are-unstable-Fix-test-when-rpath.patch
+++ b/user/rust/0010-test-sysroot-crates-are-unstable-Fix-test-when-rpath.patch
@@ -1,7 +1,7 @@
-From 8d74dfc55bad7461bbbc26a83c4103cc419bbf50 Mon Sep 17 00:00:00 2001
+From 4ca0a1760317d3029bfcb0e6253252b926ef92ce Mon Sep 17 00:00:00 2001
From: Samuel Holland <samuel@sholland.org>
Date: Mon, 17 Sep 2018 01:32:20 +0000
-Subject: [PATCH 19/23] test/sysroot-crates-are-unstable: Fix test when rpath
+Subject: [PATCH 10/14] test/sysroot-crates-are-unstable: Fix test when rpath
is disabled
Without this environment var, the test can't run rustc to find
@@ -21,5 +21,5 @@ index a35174b3c2..9e77070685 100644
- python2.7 test.py
+ env '$(HOST_RPATH_ENV)' python2.7 test.py
--
-2.19.2
+2.21.0
diff --git a/user/rust/0020-Ignore-broken-and-non-applicable-tests.patch b/user/rust/0011-Ignore-broken-and-non-applicable-tests.patch
index 3305b52d0..ebf711f77 100644
--- a/user/rust/0020-Ignore-broken-and-non-applicable-tests.patch
+++ b/user/rust/0011-Ignore-broken-and-non-applicable-tests.patch
@@ -1,7 +1,7 @@
-From 5fd5ee6042170262f6832b65a90c54989f6a9847 Mon Sep 17 00:00:00 2001
+From 63a413a24f98461476aac7ac7ead69c948874e7d Mon Sep 17 00:00:00 2001
From: Samuel Holland <samuel@sholland.org>
Date: Sun, 16 Sep 2018 16:38:48 +0000
-Subject: [PATCH 20/23] Ignore broken and non-applicable tests
+Subject: [PATCH 11/14] Ignore broken and non-applicable tests
long-linker-command-lines: takes >10 minutes to run (but still passes)
sparc-struct-abi: no sparc target
@@ -10,13 +10,14 @@ sysroot-crates-are-unstable: can't run rustc without rpath
src/test/codegen/sparc-struct-abi.rs | 1 +
src/test/run-make-fulldeps/long-linker-command-lines/Makefile | 1 -
src/test/run-make-fulldeps/sysroot-crates-are-unstable/Makefile | 2 +-
- 3 files changed, 2 insertions(+), 2 deletions(-)
+ src/test/run-pass/simd/simd-intrinsic-generic-select.rs | 2 ++
+ 4 files changed, 4 insertions(+), 2 deletions(-)
diff --git a/src/test/codegen/sparc-struct-abi.rs b/src/test/codegen/sparc-struct-abi.rs
-index 56c4364d59..9aebf8f002 100644
+index 78e5b14a21..6f93e93286 100644
--- a/src/test/codegen/sparc-struct-abi.rs
+++ b/src/test/codegen/sparc-struct-abi.rs
-@@ -13,6 +13,7 @@
+@@ -4,6 +4,7 @@
// only-sparc64
// compile-flags: -O --target=sparc64-unknown-linux-gnu --crate-type=rlib
@@ -43,6 +44,19 @@ index 9e77070685..7784230b46 100644
all:
- env '$(HOST_RPATH_ENV)' python2.7 test.py
+ true
+diff --git a/src/test/run-pass/simd/simd-intrinsic-generic-select.rs b/src/test/run-pass/simd/simd-intrinsic-generic-select.rs
+index f79b140494..39080c8c90 100644
+--- a/src/test/run-pass/simd/simd-intrinsic-generic-select.rs
++++ b/src/test/run-pass/simd/simd-intrinsic-generic-select.rs
+@@ -2,6 +2,8 @@
+ #![allow(non_camel_case_types)]
+
+ // ignore-emscripten
++// ignore-powerpc
++// ignore-powerpc64
+
+ // Test that the simd_select intrinsics produces correct results.
+
--
-2.19.2
+2.21.0
diff --git a/user/rust/0011-rustc_data_structures-use-libc-types-constants-in-fl.patch b/user/rust/0011-rustc_data_structures-use-libc-types-constants-in-fl.patch
deleted file mode 100644
index 9fb760bb1..000000000
--- a/user/rust/0011-rustc_data_structures-use-libc-types-constants-in-fl.patch
+++ /dev/null
@@ -1,209 +0,0 @@
-From c99b99efd9f9632ac4917e1c410bf34e3c2be78b Mon Sep 17 00:00:00 2001
-From: Samuel Holland <samuel@sholland.org>
-Date: Tue, 9 Oct 2018 04:15:48 +0000
-Subject: [PATCH 11/23] rustc_data_structures: use libc types/constants in
- flock
-
----
- src/librustc_data_structures/flock.rs | 155 ++------------------------
- 1 file changed, 10 insertions(+), 145 deletions(-)
-
-diff --git a/src/librustc_data_structures/flock.rs b/src/librustc_data_structures/flock.rs
-index 38ce331051..d5c38b9a39 100644
---- a/src/librustc_data_structures/flock.rs
-+++ b/src/librustc_data_structures/flock.rs
-@@ -26,143 +26,6 @@ cfg_if! {
- use std::os::unix::prelude::*;
- use libc;
-
-- #[cfg(any(target_os = "linux", target_os = "android"))]
-- mod os {
-- use libc;
--
-- #[repr(C)]
-- pub struct flock {
-- pub l_type: libc::c_short,
-- pub l_whence: libc::c_short,
-- pub l_start: libc::off_t,
-- pub l_len: libc::off_t,
-- pub l_pid: libc::pid_t,
--
-- // not actually here, but brings in line with freebsd
-- pub l_sysid: libc::c_int,
-- }
--
-- pub const F_RDLCK: libc::c_short = 0;
-- pub const F_WRLCK: libc::c_short = 1;
-- pub const F_UNLCK: libc::c_short = 2;
-- pub const F_SETLK: libc::c_int = 6;
-- pub const F_SETLKW: libc::c_int = 7;
-- }
--
-- #[cfg(target_os = "freebsd")]
-- mod os {
-- use libc;
--
-- #[repr(C)]
-- pub struct flock {
-- pub l_start: libc::off_t,
-- pub l_len: libc::off_t,
-- pub l_pid: libc::pid_t,
-- pub l_type: libc::c_short,
-- pub l_whence: libc::c_short,
-- pub l_sysid: libc::c_int,
-- }
--
-- pub const F_RDLCK: libc::c_short = 1;
-- pub const F_UNLCK: libc::c_short = 2;
-- pub const F_WRLCK: libc::c_short = 3;
-- pub const F_SETLK: libc::c_int = 12;
-- pub const F_SETLKW: libc::c_int = 13;
-- }
--
-- #[cfg(any(target_os = "dragonfly",
-- target_os = "bitrig",
-- target_os = "netbsd",
-- target_os = "openbsd"))]
-- mod os {
-- use libc;
--
-- #[repr(C)]
-- pub struct flock {
-- pub l_start: libc::off_t,
-- pub l_len: libc::off_t,
-- pub l_pid: libc::pid_t,
-- pub l_type: libc::c_short,
-- pub l_whence: libc::c_short,
--
-- // not actually here, but brings in line with freebsd
-- pub l_sysid: libc::c_int,
-- }
--
-- pub const F_RDLCK: libc::c_short = 1;
-- pub const F_UNLCK: libc::c_short = 2;
-- pub const F_WRLCK: libc::c_short = 3;
-- pub const F_SETLK: libc::c_int = 8;
-- pub const F_SETLKW: libc::c_int = 9;
-- }
--
-- #[cfg(target_os = "haiku")]
-- mod os {
-- use libc;
--
-- #[repr(C)]
-- pub struct flock {
-- pub l_type: libc::c_short,
-- pub l_whence: libc::c_short,
-- pub l_start: libc::off_t,
-- pub l_len: libc::off_t,
-- pub l_pid: libc::pid_t,
--
-- // not actually here, but brings in line with freebsd
-- pub l_sysid: libc::c_int,
-- }
--
-- pub const F_RDLCK: libc::c_short = 0x0040;
-- pub const F_UNLCK: libc::c_short = 0x0200;
-- pub const F_WRLCK: libc::c_short = 0x0400;
-- pub const F_SETLK: libc::c_int = 0x0080;
-- pub const F_SETLKW: libc::c_int = 0x0100;
-- }
--
-- #[cfg(any(target_os = "macos", target_os = "ios"))]
-- mod os {
-- use libc;
--
-- #[repr(C)]
-- pub struct flock {
-- pub l_start: libc::off_t,
-- pub l_len: libc::off_t,
-- pub l_pid: libc::pid_t,
-- pub l_type: libc::c_short,
-- pub l_whence: libc::c_short,
--
-- // not actually here, but brings in line with freebsd
-- pub l_sysid: libc::c_int,
-- }
--
-- pub const F_RDLCK: libc::c_short = 1;
-- pub const F_UNLCK: libc::c_short = 2;
-- pub const F_WRLCK: libc::c_short = 3;
-- pub const F_SETLK: libc::c_int = 8;
-- pub const F_SETLKW: libc::c_int = 9;
-- }
--
-- #[cfg(target_os = "solaris")]
-- mod os {
-- use libc;
--
-- #[repr(C)]
-- pub struct flock {
-- pub l_type: libc::c_short,
-- pub l_whence: libc::c_short,
-- pub l_start: libc::off_t,
-- pub l_len: libc::off_t,
-- pub l_sysid: libc::c_int,
-- pub l_pid: libc::pid_t,
-- }
--
-- pub const F_RDLCK: libc::c_short = 1;
-- pub const F_WRLCK: libc::c_short = 2;
-- pub const F_UNLCK: libc::c_short = 3;
-- pub const F_SETLK: libc::c_int = 6;
-- pub const F_SETLKW: libc::c_int = 7;
-- }
--
- #[derive(Debug)]
- pub struct Lock {
- fd: libc::c_int,
-@@ -192,20 +55,21 @@ cfg_if! {
- }
-
- let lock_type = if exclusive {
-- os::F_WRLCK
-+ libc::F_WRLCK
- } else {
-- os::F_RDLCK
-+ libc::F_RDLCK
- };
-
-- let flock = os::flock {
-+ let flock = libc::flock {
- l_start: 0,
- l_len: 0,
- l_pid: 0,
- l_whence: libc::SEEK_SET as libc::c_short,
-- l_type: lock_type,
-+ l_type: lock_type as libc::c_short,
-+ #[cfg(any(target_os = "freebsd", target_os = "solaris"))]
- l_sysid: 0,
- };
-- let cmd = if wait { os::F_SETLKW } else { os::F_SETLK };
-+ let cmd = if wait { libc::F_SETLKW } else { libc::F_SETLK };
- let ret = unsafe {
- libc::fcntl(fd, cmd, &flock)
- };
-@@ -221,16 +85,17 @@ cfg_if! {
-
- impl Drop for Lock {
- fn drop(&mut self) {
-- let flock = os::flock {
-+ let flock = libc::flock {
- l_start: 0,
- l_len: 0,
- l_pid: 0,
- l_whence: libc::SEEK_SET as libc::c_short,
-- l_type: os::F_UNLCK,
-+ l_type: libc::F_UNLCK as libc::c_short,
-+ #[cfg(any(target_os = "freebsd", target_os = "solaris"))]
- l_sysid: 0,
- };
- unsafe {
-- libc::fcntl(self.fd, os::F_SETLK, &flock);
-+ libc::fcntl(self.fd, libc::F_SETLK, &flock);
- libc::close(self.fd);
- }
- }
---
-2.19.2
-
diff --git a/user/rust/0021-Link-stage-2-tools-dynamically-to-libstd.patch b/user/rust/0012-Link-stage-2-tools-dynamically-to-libstd.patch
index dfb43d9b7..8d37b335c 100644
--- a/user/rust/0021-Link-stage-2-tools-dynamically-to-libstd.patch
+++ b/user/rust/0012-Link-stage-2-tools-dynamically-to-libstd.patch
@@ -1,17 +1,17 @@
-From 0aac206a1c7af54f5974eb4043b0f821720f4bf7 Mon Sep 17 00:00:00 2001
+From 51b5ec642911295036d4ff19d4d6799037da3efc Mon Sep 17 00:00:00 2001
From: Samuel Holland <samuel@sholland.org>
Date: Mon, 24 Sep 2018 23:42:23 +0000
-Subject: [PATCH 21/23] Link stage 2 tools dynamically to libstd
+Subject: [PATCH 12/14] Link stage 2 tools dynamically to libstd
---
src/bootstrap/tool.rs | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs
-index 6868a063ce..3a91b4fa07 100644
+index 9f6db73e6f..69cc2219c1 100644
--- a/src/bootstrap/tool.rs
+++ b/src/bootstrap/tool.rs
-@@ -217,7 +217,9 @@ pub fn prepare_tool_cargo(
+@@ -208,7 +208,9 @@ pub fn prepare_tool_cargo(
// We don't want to build tools dynamically as they'll be running across
// stages and such and it's just easier if they're not dynamically linked.
@@ -23,5 +23,5 @@ index 6868a063ce..3a91b4fa07 100644
if source_type == SourceType::Submodule {
cargo.env("RUSTC_EXTERNAL_TOOL", "1");
--
-2.19.2
+2.21.0
diff --git a/user/rust/0013-Fix-double_check-tests-on-big-endian-targets.patch b/user/rust/0013-Fix-double_check-tests-on-big-endian-targets.patch
deleted file mode 100644
index 546afbf20..000000000
--- a/user/rust/0013-Fix-double_check-tests-on-big-endian-targets.patch
+++ /dev/null
@@ -1,75 +0,0 @@
-From cdcbabdffce7f1b5379faf96db0dc12e4a5f1a36 Mon Sep 17 00:00:00 2001
-From: Samuel Holland <samuel@sholland.org>
-Date: Sun, 16 Sep 2018 18:27:56 +0000
-Subject: [PATCH 13/23] Fix double_check tests on big-endian targets
-
-Since the enums get optimized down to 1 byte long, the bits
-set in the usize member don't align with the enums on big-endian
-machines. Avoid this issue by shrinking the integer member to the
-same size as the enums.
----
- src/test/ui/consts/const-eval/double_check.rs | 8 ++++----
- src/test/ui/consts/const-eval/double_check2.rs | 8 ++++----
- src/test/ui/consts/const-eval/double_check2.stderr | 4 ++--
- 3 files changed, 10 insertions(+), 10 deletions(-)
-
-diff --git a/src/test/ui/consts/const-eval/double_check.rs b/src/test/ui/consts/const-eval/double_check.rs
-index 81f6e7ddd2..76f9276c05 100644
---- a/src/test/ui/consts/const-eval/double_check.rs
-+++ b/src/test/ui/consts/const-eval/double_check.rs
-@@ -21,12 +21,12 @@ enum Bar {
- union Union {
- foo: &'static Foo,
- bar: &'static Bar,
-- usize: &'static usize,
-+ u8: &'static u8,
- }
--static BAR: usize = 42;
-+static BAR: u8 = 42;
- static FOO: (&Foo, &Bar) = unsafe {(
-- Union { usize: &BAR }.foo,
-- Union { usize: &BAR }.bar,
-+ Union { u8: &BAR }.foo,
-+ Union { u8: &BAR }.bar,
- )};
-
- fn main() {}
-diff --git a/src/test/ui/consts/const-eval/double_check2.rs b/src/test/ui/consts/const-eval/double_check2.rs
-index b661ee9247..701632362c 100644
---- a/src/test/ui/consts/const-eval/double_check2.rs
-+++ b/src/test/ui/consts/const-eval/double_check2.rs
-@@ -19,12 +19,12 @@ enum Bar {
- union Union {
- foo: &'static Foo,
- bar: &'static Bar,
-- usize: &'static usize,
-+ u8: &'static u8,
- }
--static BAR: usize = 5;
-+static BAR: u8 = 5;
- static FOO: (&Foo, &Bar) = unsafe {( //~ undefined behavior
-- Union { usize: &BAR }.foo,
-- Union { usize: &BAR }.bar,
-+ Union { u8: &BAR }.foo,
-+ Union { u8: &BAR }.bar,
- )};
-
- fn main() {}
-diff --git a/src/test/ui/consts/const-eval/double_check2.stderr b/src/test/ui/consts/const-eval/double_check2.stderr
-index 2102587734..78a112304e 100644
---- a/src/test/ui/consts/const-eval/double_check2.stderr
-+++ b/src/test/ui/consts/const-eval/double_check2.stderr
-@@ -2,8 +2,8 @@ error[E0080]: this static likely exhibits undefined behavior
- --> $DIR/double_check2.rs:25:1
- |
- LL | / static FOO: (&Foo, &Bar) = unsafe {( //~ undefined behavior
--LL | | Union { usize: &BAR }.foo,
--LL | | Union { usize: &BAR }.bar,
-+LL | | Union { u8: &BAR }.foo,
-+LL | | Union { u8: &BAR }.bar,
- LL | | )};
- | |___^ type validation failed: encountered invalid enum discriminant 5 at .1.<deref>
- |
---
-2.19.2
-
diff --git a/user/rust/0022-Move-debugger-scripts-to-usr-share-rust.patch b/user/rust/0013-Move-debugger-scripts-to-usr-share-rust.patch
index a46ddcd45..a401dc5ab 100644
--- a/user/rust/0022-Move-debugger-scripts-to-usr-share-rust.patch
+++ b/user/rust/0013-Move-debugger-scripts-to-usr-share-rust.patch
@@ -1,7 +1,7 @@
-From 347b144930e68e49dc27b3682751a1d4c13d81c5 Mon Sep 17 00:00:00 2001
+From 17bb016bf9eefb42e1205b5f8d7b944147337f24 Mon Sep 17 00:00:00 2001
From: Samuel Holland <samuel@sholland.org>
Date: Mon, 17 Sep 2018 02:09:10 +0000
-Subject: [PATCH 22/23] Move debugger scripts to /usr/share/rust
+Subject: [PATCH 13/14] Move debugger scripts to /usr/share/rust
---
src/bootstrap/dist.rs | 2 +-
@@ -10,10 +10,10 @@ Subject: [PATCH 22/23] Move debugger scripts to /usr/share/rust
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs
-index fea6302d0a..e6ba249d28 100644
+index 0c6e213110..1858fade71 100644
--- a/src/bootstrap/dist.rs
+++ b/src/bootstrap/dist.rs
-@@ -591,7 +591,7 @@ impl Step for DebuggerScripts {
+@@ -583,7 +583,7 @@ impl Step for DebuggerScripts {
fn run(self, builder: &Builder) {
let host = self.host;
let sysroot = self.sysroot;
@@ -23,10 +23,10 @@ index fea6302d0a..e6ba249d28 100644
let cp_debugger_script = |file: &str| {
builder.install(&builder.src.join("src/etc/").join(file), &dst, 0o644);
diff --git a/src/etc/rust-gdb b/src/etc/rust-gdb
-index 743952a5be..71694ddb87 100755
+index 23ba93da8e..dc51b16c57 100755
--- a/src/etc/rust-gdb
+++ b/src/etc/rust-gdb
-@@ -14,7 +14,7 @@ set -e
+@@ -4,7 +4,7 @@ set -e
# Find out where the pretty printer Python module is
RUSTC_SYSROOT=`rustc --print=sysroot`
@@ -36,10 +36,10 @@ index 743952a5be..71694ddb87 100755
# Run GDB with the additional arguments that load the pretty printers
# Set the environment variable `RUST_GDB` to overwrite the call to a
diff --git a/src/etc/rust-lldb b/src/etc/rust-lldb
-index 6ed8210349..f115587ce1 100755
+index 424302d495..460e119210 100755
--- a/src/etc/rust-lldb
+++ b/src/etc/rust-lldb
-@@ -35,7 +35,7 @@ display the contents of local variables!"
+@@ -26,7 +26,7 @@ display the contents of local variables!"
fi
# Prepare commands that will be loaded before any file on the command line has been loaded
@@ -49,5 +49,5 @@ index 6ed8210349..f115587ce1 100755
category_enable="type category enable Rust"
--
-2.19.2
+2.21.0
diff --git a/user/rust/0023-Add-foxkit-target-specs.patch b/user/rust/0014-Add-foxkit-target-specs.patch
index 982326bc6..ecbfe26b5 100644
--- a/user/rust/0023-Add-foxkit-target-specs.patch
+++ b/user/rust/0014-Add-foxkit-target-specs.patch
@@ -1,7 +1,7 @@
-From 8094b006c75c38ab0e5d44d9b5c12a973949e512 Mon Sep 17 00:00:00 2001
+From 2ca1aeb3cb3e0d52917cec2a96101aba5c6e40ce Mon Sep 17 00:00:00 2001
From: Samuel Holland <samuel@sholland.org>
Date: Mon, 17 Sep 2018 02:29:06 +0000
-Subject: [PATCH 23/23] Add foxkit target specs
+Subject: [PATCH 14/14] Add foxkit target specs
---
.../spec/aarch64_foxkit_linux_musl.rs | 21 +++++++++++++++++
@@ -103,23 +103,23 @@ index 0000000000..e0284d96e6
+ Ok(base)
+}
diff --git a/src/librustc_target/spec/mod.rs b/src/librustc_target/spec/mod.rs
-index f75c4940c6..d4e0f04144 100644
+index f42b0a1c3c..f83be3e4e3 100644
--- a/src/librustc_target/spec/mod.rs
+++ b/src/librustc_target/spec/mod.rs
-@@ -414,6 +414,13 @@ supported_targets! {
- ("riscv32imac-unknown-none-elf", riscv32imac_unknown_none_elf),
+@@ -278,6 +278,13 @@ macro_rules! supported_targets {
+ }
- ("aarch64-unknown-none", aarch64_unknown_none),
-+
+ supported_targets! {
+ ("aarch64-foxkit-linux-musl", aarch64_foxkit_linux_musl),
+ ("armv7-foxkit-linux-musleabihf", armv7_foxkit_linux_musleabihf),
+ ("i586-foxkit-linux-musl", i586_foxkit_linux_musl),
+ ("powerpc-foxkit-linux-musl", powerpc_foxkit_linux_musl),
+ ("powerpc64-foxkit-linux-musl", powerpc64_foxkit_linux_musl),
+ ("x86_64-foxkit-linux-musl", x86_64_foxkit_linux_musl),
- }
-
- /// Everything `rustc` knows about how to compile for a specific target.
++
+ ("x86_64-unknown-linux-gnu", x86_64_unknown_linux_gnu),
+ ("x86_64-unknown-linux-gnux32", x86_64_unknown_linux_gnux32),
+ ("i686-unknown-linux-gnu", i686_unknown_linux_gnu),
diff --git a/src/librustc_target/spec/powerpc64_foxkit_linux_musl.rs b/src/librustc_target/spec/powerpc64_foxkit_linux_musl.rs
new file mode 100644
index 0000000000..b7202ee0fc
@@ -204,5 +204,5 @@ index 0000000000..801dd8d6f4
+ Ok(base)
+}
--
-2.19.2
+2.21.0
diff --git a/user/rust/0014-test-invalid_const_promotion-Accept-SIGTRAP-as-a-val.patch b/user/rust/0014-test-invalid_const_promotion-Accept-SIGTRAP-as-a-val.patch
deleted file mode 100644
index e892735a2..000000000
--- a/user/rust/0014-test-invalid_const_promotion-Accept-SIGTRAP-as-a-val.patch
+++ /dev/null
@@ -1,25 +0,0 @@
-From 52f262296db19bcb35410a05786e58a239216ba8 Mon Sep 17 00:00:00 2001
-From: Samuel Holland <samuel@sholland.org>
-Date: Sun, 16 Sep 2018 16:37:09 +0000
-Subject: [PATCH 14/23] test/invalid_const_promotion: Accept SIGTRAP as a valid
- death signal
-
----
- src/test/run-pass/invalid_const_promotion.rs | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/src/test/run-pass/invalid_const_promotion.rs b/src/test/run-pass/invalid_const_promotion.rs
-index ed8c499241..0f354e1aad 100644
---- a/src/test/run-pass/invalid_const_promotion.rs
-+++ b/src/test/run-pass/invalid_const_promotion.rs
-@@ -39,6 +39,7 @@ fn check_status(status: std::process::ExitStatus)
- use std::os::unix::process::ExitStatusExt;
-
- assert!(status.signal() == Some(libc::SIGILL)
-+ || status.signal() == Some(libc::SIGTRAP)
- || status.signal() == Some(libc::SIGABRT));
- }
-
---
-2.19.2
-
diff --git a/user/rust/0015-test-linkage-visibility-Ensure-symbols-are-visible-t.patch b/user/rust/0015-test-linkage-visibility-Ensure-symbols-are-visible-t.patch
deleted file mode 100644
index f4591af96..000000000
--- a/user/rust/0015-test-linkage-visibility-Ensure-symbols-are-visible-t.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-From cc2e1f8ee1d28a772c0309b226e996359180124a Mon Sep 17 00:00:00 2001
-From: Samuel Holland <samuel@sholland.org>
-Date: Sun, 16 Sep 2018 16:35:41 +0000
-Subject: [PATCH 15/23] test/linkage-visibility: Ensure symbols are visible to
- dlsym
-
-DynamicLibrary uses libc's dlsym() function internally to find symbols.
-Some implementations of dlsym(), like musl's, only look at dynamically-
-exported symbols, as found in shared libraries. To also export symbols
-from the main executable, pass --export-dynamic to the linker.
----
- src/test/run-pass-fulldeps/auxiliary/linkage-visibility.rs | 2 ++
- 1 file changed, 2 insertions(+)
-
-diff --git a/src/test/run-pass-fulldeps/auxiliary/linkage-visibility.rs b/src/test/run-pass-fulldeps/auxiliary/linkage-visibility.rs
-index 4ea3d0d0d0..b958e1a1fc 100644
---- a/src/test/run-pass-fulldeps/auxiliary/linkage-visibility.rs
-+++ b/src/test/run-pass-fulldeps/auxiliary/linkage-visibility.rs
-@@ -8,6 +8,8 @@
- // option. This file may not be copied, modified, or distributed
- // except according to those terms.
-
-+// compile-flags: -C link-arg=-Wl,--export-dynamic
-+
- #![feature(rustc_private)]
-
- // We're testing linkage visibility; the compiler warns us, but we want to
---
-2.19.2
-
diff --git a/user/rust/0016-x.py-Use-python3-instead-of-python.patch b/user/rust/0016-x.py-Use-python3-instead-of-python.patch
deleted file mode 100644
index 37302b773..000000000
--- a/user/rust/0016-x.py-Use-python3-instead-of-python.patch
+++ /dev/null
@@ -1,22 +0,0 @@
-From 591dc52ab207945a5c1b488b88cbd1cfbb1c8c94 Mon Sep 17 00:00:00 2001
-From: Samuel Holland <samuel@sholland.org>
-Date: Mon, 10 Sep 2018 01:36:00 +0000
-Subject: [PATCH 16/23] x.py: Use python3 instead of python
-
----
- x.py | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/x.py b/x.py
-index e277ab98be..a75176a3f9 100755
---- a/x.py
-+++ b/x.py
-@@ -1,4 +1,4 @@
--#!/usr/bin/env python
-+#!/usr/bin/env python3
- # Copyright 2016 The Rust Project Developers. See the COPYRIGHT
- # file at the top-level directory of this distribution and at
- # http://rust-lang.org/COPYRIGHT.
---
-2.19.2
-
diff --git a/user/rust/0017-test-target-feature-gate-Only-run-on-relevant-target.patch b/user/rust/0017-test-target-feature-gate-Only-run-on-relevant-target.patch
deleted file mode 100644
index 5e8c71668..000000000
--- a/user/rust/0017-test-target-feature-gate-Only-run-on-relevant-target.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-From bc1a70133b259188ec4595eef8d11670cc8d64f5 Mon Sep 17 00:00:00 2001
-From: Samuel Holland <samuel@sholland.org>
-Date: Sun, 16 Sep 2018 16:39:46 +0000
-Subject: [PATCH 17/23] test/target-feature-gate: Only run on relevant targets
-
----
- src/test/ui/target-feature-gate.rs | 2 ++
- src/test/ui/target-feature-gate.stderr | 2 +-
- 2 files changed, 3 insertions(+), 1 deletion(-)
-
-diff --git a/src/test/ui/target-feature-gate.rs b/src/test/ui/target-feature-gate.rs
-index 8a045884ca..5e90417fc3 100644
---- a/src/test/ui/target-feature-gate.rs
-+++ b/src/test/ui/target-feature-gate.rs
-@@ -10,6 +10,8 @@
-
- // ignore-arm
- // ignore-aarch64
-+// ignore-powerpc
-+// ignore-powerpc64
- // ignore-wasm
- // ignore-emscripten
- // ignore-mips
-diff --git a/src/test/ui/target-feature-gate.stderr b/src/test/ui/target-feature-gate.stderr
-index f18bebc0c2..5458953601 100644
---- a/src/test/ui/target-feature-gate.stderr
-+++ b/src/test/ui/target-feature-gate.stderr
-@@ -1,5 +1,5 @@
- error[E0658]: the target feature `avx512bw` is currently unstable (see issue #44839)
-- --> $DIR/target-feature-gate.rs:29:18
-+ --> $DIR/target-feature-gate.rs:31:18
- |
- LL | #[target_feature(enable = "avx512bw")]
- | ^^^^^^^^^^^^^^^^^^^
---
-2.19.2
-
diff --git a/user/rust/0030-libc-linkage.patch b/user/rust/0030-libc-linkage.patch
new file mode 100644
index 000000000..754074355
--- /dev/null
+++ b/user/rust/0030-libc-linkage.patch
@@ -0,0 +1,23 @@
+--- a/vendor/libc/src/unix/mod.rs
++++ b/vendor/libc/src/unix/mod.rs
+@@ -287,11 +287,7 @@ cfg_if! {
+ // cargo build, don't pull in anything extra as the libstd dep
+ // already pulls in all libs.
+ } else if #[cfg(target_env = "musl")] {
+- #[cfg_attr(feature = "rustc-dep-of-std",
+- link(name = "c", kind = "static",
+- cfg(target_feature = "crt-static")))]
+- #[cfg_attr(feature = "rustc-dep-of-std",
+- link(name = "c", cfg(not(target_feature = "crt-static"))))]
++ #[link(name = "c")]
+ extern {}
+ } else if #[cfg(target_os = "emscripten")] {
+ #[link(name = "c")]
+diff --git rustc-1.33.0-src/vendor/libc/.cargo-checksum.json.orig rustc-1.33.0-src/vendor/libc/.cargo-checksum.json
+index b580eeb..4f3a87d 100644
+--- rustc-1.33.0-src/vendor/libc/.cargo-checksum.json.orig
++++ rustc-1.33.0-src/vendor/libc/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"e1bbf88a55a1ba674f5192fe1585b57a51751ee6c5aa7edeb87df8027c3bb912","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","README.md":"40ddea9181e5374cf535aae33ad4ace80bd9b1beea8f881ff8d2b777da9489b2","build.rs":"2d1cbe87e0478de12a15a230d7ef0997020ba845b5b7d221d74cd2ce3fe0e720","src/cloudabi/aarch64.rs":"b8550bf1fd7344972aa4db29441486f39f31482d0327534981dbb75959c29114","src/cloudabi/arm.rs":"c197e2781c2839808bd6fcef219a29705b27b992d3ef920e9cf6ac96e2022bbf","src/cloudabi/mod.rs":"640375751825b8d4adf4fb089a53c4d883f265f96b07daf06f2dcf3acf6639c9","src/cloudabi/x86.rs":"33eb97f272d2201f3838ae74d444583c7de8f67856852ca375293b20bbd05636","src/cloudabi/x86_64.rs":"400d85d4fe39e26cf2e6ece9ee31c75fe9e88c4bcf4d836ca9f765c05c9c5be3","src/dox.rs":"b4b96dcba47797a6160c07f29982dd15d48fd2813d7d1000484bd9adc4a89de1","src/fuchsia/aarch64.rs":"40dce617877e606e92353491e1913711b1ecfa510bb983156f4a8460f490419e","src/fuchsia/mod.rs":"784dee21605f00a41d7cae1fc3d782646a84515a687a4952700eae51cc3840f0","src/fuchsia/x86_64.rs":"b4a3eff94dcf1ffe41e6500468ca2cff0e97ddbcc75fe079b6ac7adb1e493f56","src/lib.rs":"d456d34fc58c8d194f8f24201b4021860f61c5f125904bb8dcc954d1f813f6b7","src/macros.rs":"29d61c13523328c4742a7290dbb7d718ce198740824592e029199e9df5429b20","src/redox/mod.rs":"87cb35a988be84d45e79206f7cb5d098e8e7c5a1825cfcd0bd663453dedc3103","src/redox/net.rs":"f2e1922883f208cb46c00744da4a68feccfbec576c6981978ad404e46f818c8b","src/sgx.rs":"18d33a470d06c15b00be5a3bf45b6e51da6104ae7437f50c0eccbd3b79994389","src/switch.rs":"00365ed196519b260fc99a32f02d60b3fb1aaf0484654b19fec8c25bc1687420","src/unix/bsd/apple/b32.rs":"da60c99ee3d773f9c0767b93ccf18b67ad67576e2c58e2e86d05b23c23fd989a","src/unix/bsd/apple/b64.rs":"0cc15406c3a91abde65b28c7783ccab7801474ecc51c78dbd1d7332d4b834390","src/unix/bsd/apple/mod.rs":"b0e2d1f7fdb71d7a02bc5994c66a5ae660955227a0c0f808172a30b38fd40299",
"src/unix/bsd/freebsdlike/dragonfly/mod.rs":"6c69319675328d4d16939c9c2aba3fb8ca9381a8b390d18d766949307090304b","src/unix/bsd/freebsdlike/freebsd/aarch64.rs":"97132e2097411034271b8c927ecc94a208a361564680972a6c82998bd30a9826","src/unix/bsd/freebsdlike/freebsd/mod.rs":"6bbb3c3cf0753f32a2e61ac0f011a6c5e870a0814a6ed4eb9bb9dcdc58a9dca3","src/unix/bsd/freebsdlike/freebsd/x86.rs":"54311d3ebf2bb091ab22361e377e6ef9224aec2ecfe459fbfcedde4932db9c58","src/unix/bsd/freebsdlike/freebsd/x86_64.rs":"97132e2097411034271b8c927ecc94a208a361564680972a6c82998bd30a9826","src/unix/bsd/freebsdlike/mod.rs":"596def64669e2f27cffebca8e5c682ee7d576f84f8f99771c27e86ce8e818221","src/unix/bsd/mod.rs":"0c846b3a2c9390dfe090603f1ad643b107cc7e7605866c5399aa81dfea4e74bd","src/unix/bsd/netbsdlike/mod.rs":"9f55f35f8e4e26c7c63eaf67ddda0e40cc2abbabc2a68750809162e6f80144f1","src/unix/bsd/netbsdlike/netbsd/aarch64.rs":"f455848cd03cb80897776f2355dbd14bbb3aa88b1aa6aec22122ec47cdbd0f20","src/unix/bsd/netbsdlike/netbsd/arm.rs":"4c9eac982c385165f475ef3a80770fd634c6dde8fb400b3cb7e08c9bb7498af6","src/unix/bsd/netbsdlike/netbsd/mod.rs":"88617ada3bc740c5262926b9c45d7f51120d358522f051f0afdf7465edd13c29","src/unix/bsd/netbsdlike/netbsd/powerpc.rs":"183818c62dff72db6ee1d329d90ad8727628775ed5cd823c8485c6769003c91e","src/unix/bsd/netbsdlike/netbsd/sparc64.rs":"4db043327fb2615af2fb0f999d162310bea2294e7d050806e19a46b083561f16","src/unix/bsd/netbsdlike/netbsd/x86.rs":"979c4cbacbde37b9053f2c9df4b8f87d37b255436ef4a383e1e5f40ee21180d6","src/unix/bsd/netbsdlike/netbsd/x86_64.rs":"741353a1c6451efb0c7c7c486fd2cb54e3d36fd1c3dd280f6432d8858b0a5356","src/unix/bsd/netbsdlike/openbsdlike/bitrig/mod.rs":"4d2ad2e885d83ad3134add61580f49a8e58cb03284ae328273a9e2e104a4d084","src/unix/bsd/netbsdlike/openbsdlike/bitrig/x86.rs":"bd251a102bed65d5cb3459275f6ec3310fe5803ff4c9651212115548f86256d0","src/unix/bsd/netbsdlike/openbsdlike/bitrig/x86_64.rs":"6e6f15e81597d85d83ca0eeb154e4f8b8e7f9cbb6a9cfa176601f78642ef94f9","src/unix/bsd/netbsdlike/openbsd
like/mod.rs":"6320b7bc7283dbc2a07cfb8b5a793c0153535601d050f33c1ff5e4a5cc57c2be","src/unix/bsd/netbsdlike/openbsdlike/openbsd/aarch64.rs":"820092e397c7ec259cd2de8f2444083a8e57071c02d73d678701dfa7807726e9","src/unix/bsd/netbsdlike/openbsdlike/openbsd/mod.rs":"6adc0dcb1b358ef91bbeb5512e19ba6f40adc77b7d259cb11641a69426fc5ea2","src/unix/bsd/netbsdlike/openbsdlike/openbsd/x86.rs":"44b7ea81cf363777b29935da175e702cbf45ed78f7498ae57faf44aa32335085","src/unix/bsd/netbsdlike/openbsdlike/openbsd/x86_64.rs":"97956c3a53aa07a2edf8865252f980dade0d34e930adca0e870e7344d7e2b0bb","src/unix/haiku/b32.rs":"69ae47fc52c6880e85416b4744500d5655c9ec6131cb737f3b649fceaadce15a","src/unix/haiku/b64.rs":"73e64db09275a8da8d50a13cce2cfa2b136036ddf3a930d2939f337fc995900b","src/unix/haiku/mod.rs":"b00fa310b6d835cb613a02101c7b1a72c636faec316586587cb63aebdcb9de8f","src/unix/hermit/aarch64.rs":"86048676e335944c37a63d0083d0f368ae10ceccefeed9debb3bbe08777fc682","src/unix/hermit/mod.rs":"248c0b0abaa7d56a7dc3293e67a0ce70a4ef8c83638bda8aa222ad8bb0ef6801","src/unix/hermit/x86_64.rs":"ab832b7524e5fb15c49ff7431165ab1a37dc4667ae0b58e8306f4c539bfa110c","src/unix/mod.rs":"c5f3c6d74ac8e8ef83374b6115bab7c6e57297336a7635aa4c47462649b00dfb","src/unix/newlib/aarch64/mod.rs":"c408a990f22fb4292a824f38367e9b517e6e6f8623328397ee631cc88b3d1f7d","src/unix/newlib/arm/mod.rs":"2b6dba2e697ab9b4f4bc4dd5f28057249e9b596d1cb395a9322ec87605c4a5c4","src/unix/newlib/mod.rs":"eea5f1eb720184d9a00be966e4161fddb3733362fc349182e85b1599ba4e825f","src/unix/notbsd/android/b32/arm.rs":"3625a32c7e58cfe683a53486fbe3d42d4e28f00bea31e19cb46ed2bb0b6a140b","src/unix/notbsd/android/b32/mod.rs":"2fbe398c1fb5251dda6213741a193e50aae4622807cb255d1dd2e82b536f0c65","src/unix/notbsd/android/b32/x86.rs":"ae2b7f1d6278caddc007749bb1d09ca33f7593478a0fd7fe98b457dae86c7814","src/unix/notbsd/android/b64/aarch64.rs":"63d65629d79371814910f691672ef593d20244ee09be26f1ebe07ee6212d0163","src/unix/notbsd/android/b64/mod.rs":"90d4f6b063fd4de42fd302cbc9d9902fd99ac1d71dc48c
b8bc6ad7b4c902e481","src/unix/notbsd/android/b64/x86_64.rs":"5547aef8dcbaa5a932559f34606fd8d89f6c9c15173d2b1412c12d39b3c1045f","src/unix/notbsd/android/mod.rs":"19013f41ac8ccac034eb48777a4041d69342edb9ca25d4e0bb9c5938f62eef3e","src/unix/notbsd/emscripten.rs":"9ed47a0a5020c8f72d9496f87ef0da669c1ed2b85980013100c466f18cd8a4c5","src/unix/notbsd/linux/mips/mips32.rs":"a483ddfd10765b7d5090dc21686eee8842649cd21236828a42d634114885f5f9","src/unix/notbsd/linux/mips/mips64.rs":"9fff696e3943cf206b549d1ae13fa361828e9a8454e2d5730eeaa1c172ff370d","src/unix/notbsd/linux/mips/mod.rs":"d18a40011e9090a4a2a1f27e5bd96eb2503ac352f4809648e15841083d7f3a68","src/unix/notbsd/linux/mod.rs":"d84e95d36f9477fa1aa694bf18a919ba20e65aa71539c3f3dc06e8f92acee521","src/unix/notbsd/linux/musl/b32/arm.rs":"fdf170e03812072785ec531f1ae810d0c4feb9b29d0c316681b9f7affe1262c0","src/unix/notbsd/linux/musl/b32/mips.rs":"afa4981d93f29b3fb0083a73ce1323f7dce1392f90d5cf1966b1fae10d510033","src/unix/notbsd/linux/musl/b32/mod.rs":"540928f168f145c136f9dd729ffa12b9d1838d9fe664fc642365d17d7fae648f","src/unix/notbsd/linux/musl/b32/powerpc.rs":"16c615770a96f155540511f58b5a8070c9c7a43e12bdfed83996690e7558bcb5","src/unix/notbsd/linux/musl/b32/x86.rs":"3abf1610c68c37f407cdd0f93563c3542c024219e0e8c9888266a6ee014e0c87","src/unix/notbsd/linux/musl/b64/aarch64.rs":"d98643408c2837387988f78adc95c90ad21196a6f8f879e3d33d7e8ccf612640","src/unix/notbsd/linux/musl/b64/mod.rs":"d9285cd705e2e92552a08c9aa69b810e7e1bd0e9da6edf069c678af285579661","src/unix/notbsd/linux/musl/b64/powerpc64.rs":"544d8a7f6d6d84143df8a4c3537c9a3a36bf3d338d7a1c15689b94492498d8d7","src/unix/notbsd/linux/musl/b64/x86_64.rs":"25340999290a63d564ec149532c905f59c312ec369f8806d6b15df66fa1b8857","src/unix/notbsd/linux/musl/mod.rs":"4e582af62a11e8a5235ccbeabe5b1003fc17705e606c1ed9b21b931a470681fc","src/unix/notbsd/linux/other/b32/arm.rs":"d9892f7350b2978335f734f1cd2d7fed60f0f2e66aa05bee3f69549c031f8b14","src/unix/notbsd/linux/other/b32/mod.rs":"26211878c6481861e1139362562
0edaa0700cac41f55f0118c0f0bd42c1b7520","src/unix/notbsd/linux/other/b32/powerpc.rs":"253fcd2f9978525285be1903cc08f3fec2dc3b12d1660a33e2995b4f6b810d1c","src/unix/notbsd/linux/other/b32/x86.rs":"49376e3ed0f3ff95c230ac20751911fe3c608dfe15c7c118b069fd7a954d8db9","src/unix/notbsd/linux/other/b64/aarch64.rs":"1124ab5e974718b94fa40ae0f1772fb1c537910ef3e018b7c6c94a38b83dd742","src/unix/notbsd/linux/other/b64/mod.rs":"63e1a3fdf5f4d1b9820934ab344c91aed5e458e7e05908535d2e942d51a08bf8","src/unix/notbsd/linux/other/b64/not_x32.rs":"fa8636fb93eab230ed53bdec0a06f5b81d6d982cc0800103563c8c1eefcdb2d9","src/unix/notbsd/linux/other/b64/powerpc64.rs":"a771d982ed8a9458e1f2f039f959b5075b22443159875ba5612eebba96661c24","src/unix/notbsd/linux/other/b64/sparc64.rs":"0caffe5924886f3aed187531dfe78189b3df40adb96782ec4b471647ff83e9b1","src/unix/notbsd/linux/other/b64/x32.rs":"06a26c5120ced30fc015c220799b67c4401be2f13fc6c7361bebd3d37ff4982d","src/unix/notbsd/linux/other/b64/x86_64.rs":"70d0cec088630502128c1c28ffef9dcc0fd9c0644f6a71b1325961d075625226","src/unix/notbsd/linux/other/mod.rs":"fe76beac87fc549b076078a0fa3e2ecd4c13b0e6e4fcb34a89e16c6584f5259b","src/unix/notbsd/linux/s390x.rs":"9e990574dfc22b6eb91bcc7c6f8297f9ad42f9183efb366a818be88180a3fac3","src/unix/notbsd/mod.rs":"a0fdecac118b70fb670f0fcccaa7f826440873448a96dd557fb1fe52cf391e89","src/unix/solaris/mod.rs":"7950a26f3eb0b7bc8e71f524cc2e05832b759fe2ce12a58e0c1957e0ec1286dc","src/unix/uclibc/mips/mips32.rs":"9739c5fb47f389a0394ef08ee30da97a3de0a1300020731a8cc0a033616011b2","src/unix/uclibc/mips/mips64.rs":"230583280bbc7b3c7fcdb61244f51fa1af5944ca127c7cf83c598fe2313713d0","src/unix/uclibc/mips/mod.rs":"2d76e6cfeb2b7f7c59231a6e099f1aed811a45659e62153aaf00c220b9488a9d","src/unix/uclibc/mod.rs":"521818507cef958376edfd8401c35db7ee1c51ee3cf106ad962002104a184cf5","src/unix/uclibc/x86_64/l4re.rs":"68fd3a833fd1f7caf784a084224f384bdbdfb8b5a14ef94c4f5155409afb3439","src/unix/uclibc/x86_64/mod.rs":"419182836aedd426a5c9e6b8667058adf86ac8f43af73ce8d00c5
03f8ff8f414","src/unix/uclibc/x86_64/other.rs":"f03b47842896f2f3ae6f8ebdcbcf0276454f880349d9cf00e3d304f8136893c5","src/windows/gnu.rs":"4d9033162cc6f7d245256c7b165c1ec18a741771fd9b99a55b421e8d14978599","src/windows/mod.rs":"7d74b12f71de86ee517a9cd93b1b38d0551646db0eaf526048abcbdd4bbeb250","src/windows/msvc.rs":"8f46cf66883c794175609a3b2bafaa094d0ba63eb6720ef1b84b175603f1074f"},"package":"023a4cd09b2ff695f9734c1934145a315594b7986398496841c7031a5a1bbdbd"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"e1bbf88a55a1ba674f5192fe1585b57a51751ee6c5aa7edeb87df8027c3bb912","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","README.md":"40ddea9181e5374cf535aae33ad4ace80bd9b1beea8f881ff8d2b777da9489b2","build.rs":"2d1cbe87e0478de12a15a230d7ef0997020ba845b5b7d221d74cd2ce3fe0e720","src/cloudabi/aarch64.rs":"b8550bf1fd7344972aa4db29441486f39f31482d0327534981dbb75959c29114","src/cloudabi/arm.rs":"c197e2781c2839808bd6fcef219a29705b27b992d3ef920e9cf6ac96e2022bbf","src/cloudabi/mod.rs":"640375751825b8d4adf4fb089a53c4d883f265f96b07daf06f2dcf3acf6639c9","src/cloudabi/x86.rs":"33eb97f272d2201f3838ae74d444583c7de8f67856852ca375293b20bbd05636","src/cloudabi/x86_64.rs":"400d85d4fe39e26cf2e6ece9ee31c75fe9e88c4bcf4d836ca9f765c05c9c5be3","src/dox.rs":"b4b96dcba47797a6160c07f29982dd15d48fd2813d7d1000484bd9adc4a89de1","src/fuchsia/aarch64.rs":"40dce617877e606e92353491e1913711b1ecfa510bb983156f4a8460f490419e","src/fuchsia/mod.rs":"784dee21605f00a41d7cae1fc3d782646a84515a687a4952700eae51cc3840f0","src/fuchsia/x86_64.rs":"b4a3eff94dcf1ffe41e6500468ca2cff0e97ddbcc75fe079b6ac7adb1e493f56","src/lib.rs":"d456d34fc58c8d194f8f24201b4021860f61c5f125904bb8dcc954d1f813f6b7","src/macros.rs":"29d61c13523328c4742a7290dbb7d718ce198740824592e029199e9df5429b20","src/redox/mod.rs":"87cb35a988be84d45e79206f7cb5d098e8e7c5a1825cfcd0bd663453dedc3103","src/redox/net.rs":"f2e1922883f208cb46c00744da4a68feccfbec576c6981978ad404e46f818c8b","src/sgx.rs":"18d33a470d06c15b00be5a3bf45b6e51da6104ae7437f50c0eccbd3b79994389","src/switch.rs":"00365ed196519b260fc99a32f02d60b3fb1aaf0484654b19fec8c25bc1687420","src/unix/bsd/apple/b32.rs":"da60c99ee3d773f9c0767b93ccf18b67ad67576e2c58e2e86d05b23c23fd989a","src/unix/bsd/apple/b64.rs":"0cc15406c3a91abde65b28c7783ccab7801474ecc51c78dbd1d7332d4b834390","src/unix/bsd/apple/mod.rs":"b0e2d1f7fdb71d7a02bc5994c66a5ae660955227a0c0f808172a30b38fd40299",
"src/unix/bsd/freebsdlike/dragonfly/mod.rs":"6c69319675328d4d16939c9c2aba3fb8ca9381a8b390d18d766949307090304b","src/unix/bsd/freebsdlike/freebsd/aarch64.rs":"97132e2097411034271b8c927ecc94a208a361564680972a6c82998bd30a9826","src/unix/bsd/freebsdlike/freebsd/mod.rs":"6bbb3c3cf0753f32a2e61ac0f011a6c5e870a0814a6ed4eb9bb9dcdc58a9dca3","src/unix/bsd/freebsdlike/freebsd/x86.rs":"54311d3ebf2bb091ab22361e377e6ef9224aec2ecfe459fbfcedde4932db9c58","src/unix/bsd/freebsdlike/freebsd/x86_64.rs":"97132e2097411034271b8c927ecc94a208a361564680972a6c82998bd30a9826","src/unix/bsd/freebsdlike/mod.rs":"596def64669e2f27cffebca8e5c682ee7d576f84f8f99771c27e86ce8e818221","src/unix/bsd/mod.rs":"0c846b3a2c9390dfe090603f1ad643b107cc7e7605866c5399aa81dfea4e74bd","src/unix/bsd/netbsdlike/mod.rs":"9f55f35f8e4e26c7c63eaf67ddda0e40cc2abbabc2a68750809162e6f80144f1","src/unix/bsd/netbsdlike/netbsd/aarch64.rs":"f455848cd03cb80897776f2355dbd14bbb3aa88b1aa6aec22122ec47cdbd0f20","src/unix/bsd/netbsdlike/netbsd/arm.rs":"4c9eac982c385165f475ef3a80770fd634c6dde8fb400b3cb7e08c9bb7498af6","src/unix/bsd/netbsdlike/netbsd/mod.rs":"88617ada3bc740c5262926b9c45d7f51120d358522f051f0afdf7465edd13c29","src/unix/bsd/netbsdlike/netbsd/powerpc.rs":"183818c62dff72db6ee1d329d90ad8727628775ed5cd823c8485c6769003c91e","src/unix/bsd/netbsdlike/netbsd/sparc64.rs":"4db043327fb2615af2fb0f999d162310bea2294e7d050806e19a46b083561f16","src/unix/bsd/netbsdlike/netbsd/x86.rs":"979c4cbacbde37b9053f2c9df4b8f87d37b255436ef4a383e1e5f40ee21180d6","src/unix/bsd/netbsdlike/netbsd/x86_64.rs":"741353a1c6451efb0c7c7c486fd2cb54e3d36fd1c3dd280f6432d8858b0a5356","src/unix/bsd/netbsdlike/openbsdlike/bitrig/mod.rs":"4d2ad2e885d83ad3134add61580f49a8e58cb03284ae328273a9e2e104a4d084","src/unix/bsd/netbsdlike/openbsdlike/bitrig/x86.rs":"bd251a102bed65d5cb3459275f6ec3310fe5803ff4c9651212115548f86256d0","src/unix/bsd/netbsdlike/openbsdlike/bitrig/x86_64.rs":"6e6f15e81597d85d83ca0eeb154e4f8b8e7f9cbb6a9cfa176601f78642ef94f9","src/unix/bsd/netbsdlike/openbsd
like/mod.rs":"6320b7bc7283dbc2a07cfb8b5a793c0153535601d050f33c1ff5e4a5cc57c2be","src/unix/bsd/netbsdlike/openbsdlike/openbsd/aarch64.rs":"820092e397c7ec259cd2de8f2444083a8e57071c02d73d678701dfa7807726e9","src/unix/bsd/netbsdlike/openbsdlike/openbsd/mod.rs":"6adc0dcb1b358ef91bbeb5512e19ba6f40adc77b7d259cb11641a69426fc5ea2","src/unix/bsd/netbsdlike/openbsdlike/openbsd/x86.rs":"44b7ea81cf363777b29935da175e702cbf45ed78f7498ae57faf44aa32335085","src/unix/bsd/netbsdlike/openbsdlike/openbsd/x86_64.rs":"97956c3a53aa07a2edf8865252f980dade0d34e930adca0e870e7344d7e2b0bb","src/unix/haiku/b32.rs":"69ae47fc52c6880e85416b4744500d5655c9ec6131cb737f3b649fceaadce15a","src/unix/haiku/b64.rs":"73e64db09275a8da8d50a13cce2cfa2b136036ddf3a930d2939f337fc995900b","src/unix/haiku/mod.rs":"b00fa310b6d835cb613a02101c7b1a72c636faec316586587cb63aebdcb9de8f","src/unix/hermit/aarch64.rs":"86048676e335944c37a63d0083d0f368ae10ceccefeed9debb3bbe08777fc682","src/unix/hermit/mod.rs":"248c0b0abaa7d56a7dc3293e67a0ce70a4ef8c83638bda8aa222ad8bb0ef6801","src/unix/hermit/x86_64.rs":"ab832b7524e5fb15c49ff7431165ab1a37dc4667ae0b58e8306f4c539bfa110c","src/unix/mod.rs":"3231196aa748186ad21063bb43c69b95c0d90ba32c5547c72f7c6ed343a100b7","src/unix/newlib/aarch64/mod.rs":"c408a990f22fb4292a824f38367e9b517e6e6f8623328397ee631cc88b3d1f7d","src/unix/newlib/arm/mod.rs":"2b6dba2e697ab9b4f4bc4dd5f28057249e9b596d1cb395a9322ec87605c4a5c4","src/unix/newlib/mod.rs":"eea5f1eb720184d9a00be966e4161fddb3733362fc349182e85b1599ba4e825f","src/unix/notbsd/android/b32/arm.rs":"3625a32c7e58cfe683a53486fbe3d42d4e28f00bea31e19cb46ed2bb0b6a140b","src/unix/notbsd/android/b32/mod.rs":"2fbe398c1fb5251dda6213741a193e50aae4622807cb255d1dd2e82b536f0c65","src/unix/notbsd/android/b32/x86.rs":"ae2b7f1d6278caddc007749bb1d09ca33f7593478a0fd7fe98b457dae86c7814","src/unix/notbsd/android/b64/aarch64.rs":"63d65629d79371814910f691672ef593d20244ee09be26f1ebe07ee6212d0163","src/unix/notbsd/android/b64/mod.rs":"90d4f6b063fd4de42fd302cbc9d9902fd99ac1d71dc48c
b8bc6ad7b4c902e481","src/unix/notbsd/android/b64/x86_64.rs":"5547aef8dcbaa5a932559f34606fd8d89f6c9c15173d2b1412c12d39b3c1045f","src/unix/notbsd/android/mod.rs":"19013f41ac8ccac034eb48777a4041d69342edb9ca25d4e0bb9c5938f62eef3e","src/unix/notbsd/emscripten.rs":"9ed47a0a5020c8f72d9496f87ef0da669c1ed2b85980013100c466f18cd8a4c5","src/unix/notbsd/linux/mips/mips32.rs":"a483ddfd10765b7d5090dc21686eee8842649cd21236828a42d634114885f5f9","src/unix/notbsd/linux/mips/mips64.rs":"9fff696e3943cf206b549d1ae13fa361828e9a8454e2d5730eeaa1c172ff370d","src/unix/notbsd/linux/mips/mod.rs":"d18a40011e9090a4a2a1f27e5bd96eb2503ac352f4809648e15841083d7f3a68","src/unix/notbsd/linux/mod.rs":"d84e95d36f9477fa1aa694bf18a919ba20e65aa71539c3f3dc06e8f92acee521","src/unix/notbsd/linux/musl/b32/arm.rs":"fdf170e03812072785ec531f1ae810d0c4feb9b29d0c316681b9f7affe1262c0","src/unix/notbsd/linux/musl/b32/mips.rs":"afa4981d93f29b3fb0083a73ce1323f7dce1392f90d5cf1966b1fae10d510033","src/unix/notbsd/linux/musl/b32/mod.rs":"540928f168f145c136f9dd729ffa12b9d1838d9fe664fc642365d17d7fae648f","src/unix/notbsd/linux/musl/b32/powerpc.rs":"16c615770a96f155540511f58b5a8070c9c7a43e12bdfed83996690e7558bcb5","src/unix/notbsd/linux/musl/b32/x86.rs":"3abf1610c68c37f407cdd0f93563c3542c024219e0e8c9888266a6ee014e0c87","src/unix/notbsd/linux/musl/b64/aarch64.rs":"d98643408c2837387988f78adc95c90ad21196a6f8f879e3d33d7e8ccf612640","src/unix/notbsd/linux/musl/b64/mod.rs":"d9285cd705e2e92552a08c9aa69b810e7e1bd0e9da6edf069c678af285579661","src/unix/notbsd/linux/musl/b64/powerpc64.rs":"544d8a7f6d6d84143df8a4c3537c9a3a36bf3d338d7a1c15689b94492498d8d7","src/unix/notbsd/linux/musl/b64/x86_64.rs":"25340999290a63d564ec149532c905f59c312ec369f8806d6b15df66fa1b8857","src/unix/notbsd/linux/musl/mod.rs":"4e582af62a11e8a5235ccbeabe5b1003fc17705e606c1ed9b21b931a470681fc","src/unix/notbsd/linux/other/b32/arm.rs":"d9892f7350b2978335f734f1cd2d7fed60f0f2e66aa05bee3f69549c031f8b14","src/unix/notbsd/linux/other/b32/mod.rs":"26211878c6481861e1139362562
0edaa0700cac41f55f0118c0f0bd42c1b7520","src/unix/notbsd/linux/other/b32/powerpc.rs":"253fcd2f9978525285be1903cc08f3fec2dc3b12d1660a33e2995b4f6b810d1c","src/unix/notbsd/linux/other/b32/x86.rs":"49376e3ed0f3ff95c230ac20751911fe3c608dfe15c7c118b069fd7a954d8db9","src/unix/notbsd/linux/other/b64/aarch64.rs":"1124ab5e974718b94fa40ae0f1772fb1c537910ef3e018b7c6c94a38b83dd742","src/unix/notbsd/linux/other/b64/mod.rs":"63e1a3fdf5f4d1b9820934ab344c91aed5e458e7e05908535d2e942d51a08bf8","src/unix/notbsd/linux/other/b64/not_x32.rs":"fa8636fb93eab230ed53bdec0a06f5b81d6d982cc0800103563c8c1eefcdb2d9","src/unix/notbsd/linux/other/b64/powerpc64.rs":"a771d982ed8a9458e1f2f039f959b5075b22443159875ba5612eebba96661c24","src/unix/notbsd/linux/other/b64/sparc64.rs":"0caffe5924886f3aed187531dfe78189b3df40adb96782ec4b471647ff83e9b1","src/unix/notbsd/linux/other/b64/x32.rs":"06a26c5120ced30fc015c220799b67c4401be2f13fc6c7361bebd3d37ff4982d","src/unix/notbsd/linux/other/b64/x86_64.rs":"70d0cec088630502128c1c28ffef9dcc0fd9c0644f6a71b1325961d075625226","src/unix/notbsd/linux/other/mod.rs":"fe76beac87fc549b076078a0fa3e2ecd4c13b0e6e4fcb34a89e16c6584f5259b","src/unix/notbsd/linux/s390x.rs":"9e990574dfc22b6eb91bcc7c6f8297f9ad42f9183efb366a818be88180a3fac3","src/unix/notbsd/mod.rs":"a0fdecac118b70fb670f0fcccaa7f826440873448a96dd557fb1fe52cf391e89","src/unix/solaris/mod.rs":"7950a26f3eb0b7bc8e71f524cc2e05832b759fe2ce12a58e0c1957e0ec1286dc","src/unix/uclibc/mips/mips32.rs":"9739c5fb47f389a0394ef08ee30da97a3de0a1300020731a8cc0a033616011b2","src/unix/uclibc/mips/mips64.rs":"230583280bbc7b3c7fcdb61244f51fa1af5944ca127c7cf83c598fe2313713d0","src/unix/uclibc/mips/mod.rs":"2d76e6cfeb2b7f7c59231a6e099f1aed811a45659e62153aaf00c220b9488a9d","src/unix/uclibc/mod.rs":"521818507cef958376edfd8401c35db7ee1c51ee3cf106ad962002104a184cf5","src/unix/uclibc/x86_64/l4re.rs":"68fd3a833fd1f7caf784a084224f384bdbdfb8b5a14ef94c4f5155409afb3439","src/unix/uclibc/x86_64/mod.rs":"419182836aedd426a5c9e6b8667058adf86ac8f43af73ce8d00c5
03f8ff8f414","src/unix/uclibc/x86_64/other.rs":"f03b47842896f2f3ae6f8ebdcbcf0276454f880349d9cf00e3d304f8136893c5","src/windows/gnu.rs":"4d9033162cc6f7d245256c7b165c1ec18a741771fd9b99a55b421e8d14978599","src/windows/mod.rs":"7d74b12f71de86ee517a9cd93b1b38d0551646db0eaf526048abcbdd4bbeb250","src/windows/msvc.rs":"8f46cf66883c794175609a3b2bafaa094d0ba63eb6720ef1b84b175603f1074f"},"package":"023a4cd09b2ff695f9734c1934145a315594b7986398496841c7031a5a1bbdbd"}
diff --git a/user/rust/0030-liblibc-linkage.patch b/user/rust/0030-liblibc-linkage.patch
deleted file mode 100644
index 4daa0e48f..000000000
--- a/user/rust/0030-liblibc-linkage.patch
+++ /dev/null
@@ -1,17 +0,0 @@
-diff --git a/src/liblibc/src/unix/mod.rs b/src/liblibc/src/unix/mod.rs
-index 9c68178a..1ae270e9 100644
---- a/src/liblibc/src/unix/mod.rs
-+++ b/src/liblibc/src/unix/mod.rs
-@@ -287,11 +287,7 @@ cfg_if! {
- // cargo build, don't pull in anything extra as the libstd dep
- // already pulls in all libs.
- } else if #[cfg(target_env = "musl")] {
-- #[cfg_attr(feature = "stdbuild",
-- link(name = "c", kind = "static",
-- cfg(target_feature = "crt-static")))]
-- #[cfg_attr(feature = "stdbuild",
-- link(name = "c", cfg(not(target_feature = "crt-static"))))]
-+ #[link(name = "c")]
- extern {}
- } else if #[cfg(target_os = "emscripten")] {
- #[link(name = "c")]
diff --git a/user/rust/0031-liblibc-1b130d4c349d.patch b/user/rust/0031-liblibc-1b130d4c349d.patch
deleted file mode 100644
index 29087689b..000000000
--- a/user/rust/0031-liblibc-1b130d4c349d.patch
+++ /dev/null
@@ -1,126 +0,0 @@
-From 1b130d4c349d35da9aeb07ce020cbf96755e8a6b Mon Sep 17 00:00:00 2001
-From: Samuel Holland <samuel@sholland.org>
-Date: Thu, 1 Nov 2018 03:37:18 +0000
-Subject: [PATCH] Add F_RDLCK/F_WRLCK/F_UNLCK to several platforms
-
-These are used by the flock wrapper in rustc_data_structures. The
-constants were already present in x86_64-linux-gnu and BSD (since
-4928bd986907).
----
- src/unix/haiku/mod.rs | 4 ++++
- src/unix/notbsd/android/mod.rs | 4 ++++
- src/unix/notbsd/linux/musl/mod.rs | 4 ++++
- src/unix/notbsd/linux/other/b32/mod.rs | 4 ++++
- src/unix/notbsd/linux/other/b64/aarch64.rs | 4 ++++
- src/unix/notbsd/linux/other/b64/powerpc64.rs | 4 ++++
- src/unix/notbsd/linux/other/b64/sparc64.rs | 4 ++++
- 7 files changed, 28 insertions(+)
-
-diff --git a/src/liblibc/src/unix/haiku/mod.rs b/src/liblibc/src/unix/haiku/mod.rs
-index 2ef36f8f3..307a5a40d 100644
---- a/src/liblibc/src/unix/haiku/mod.rs
-+++ b/src/liblibc/src/unix/haiku/mod.rs
-@@ -351,6 +351,10 @@ pub const F_SETLK: ::c_int = 0x0080;
- pub const F_SETLKW: ::c_int = 0x0100;
- pub const F_DUPFD_CLOEXEC: ::c_int = 0x0200;
-
-+pub const F_RDLCK: ::c_int = 0x0040;
-+pub const F_UNLCK: ::c_int = 0x0200;
-+pub const F_WRLCK: ::c_int = 0x0400;
-+
- pub const AT_FDCWD: ::c_int = -1;
- pub const AT_SYMLINK_NOFOLLOW: ::c_int = 0x01;
- pub const AT_SYMLINK_FOLLOW: ::c_int = 0x02;
-diff --git a/src/liblibc/src/unix/notbsd/android/mod.rs b/src/liblibc/src/unix/notbsd/android/mod.rs
-index 3e10acd50..5b750b2b1 100644
---- a/src/liblibc/src/unix/notbsd/android/mod.rs
-+++ b/src/liblibc/src/unix/notbsd/android/mod.rs
-@@ -744,6 +744,10 @@ pub const F_SETOWN: ::c_int = 8;
- pub const F_SETLK: ::c_int = 6;
- pub const F_SETLKW: ::c_int = 7;
-
-+pub const F_RDLCK: ::c_int = 0;
-+pub const F_WRLCK: ::c_int = 1;
-+pub const F_UNLCK: ::c_int = 2;
-+
- pub const TCGETS: ::c_int = 0x5401;
- pub const TCSETS: ::c_int = 0x5402;
- pub const TCSETSW: ::c_int = 0x5403;
-diff --git a/src/liblibc/src/unix/notbsd/linux/musl/mod.rs b/src/liblibc/src/unix/notbsd/linux/musl/mod.rs
-index 10d61ebeb..9c669d9b4 100644
---- a/src/liblibc/src/unix/notbsd/linux/musl/mod.rs
-+++ b/src/liblibc/src/unix/notbsd/linux/musl/mod.rs
-@@ -121,6 +121,10 @@ pub const ECOMM: ::c_int = 70;
- pub const EPROTO: ::c_int = 71;
- pub const EDOTDOT: ::c_int = 73;
-
-+pub const F_RDLCK: ::c_int = 0;
-+pub const F_WRLCK: ::c_int = 1;
-+pub const F_UNLCK: ::c_int = 2;
-+
- pub const SA_NODEFER: ::c_int = 0x40000000;
- pub const SA_RESETHAND: ::c_int = 0x80000000;
- pub const SA_RESTART: ::c_int = 0x10000000;
-diff --git a/src/liblibc/src/unix/notbsd/linux/other/b32/mod.rs b/src/liblibc/src/unix/notbsd/linux/other/b32/mod.rs
-index 5b0142ab8..d078f7537 100644
---- a/src/liblibc/src/unix/notbsd/linux/other/b32/mod.rs
-+++ b/src/liblibc/src/unix/notbsd/linux/other/b32/mod.rs
-@@ -251,6 +251,10 @@ pub const F_SETOWN: ::c_int = 8;
- pub const F_SETLK: ::c_int = 6;
- pub const F_SETLKW: ::c_int = 7;
-
-+pub const F_RDLCK: ::c_int = 0;
-+pub const F_WRLCK: ::c_int = 1;
-+pub const F_UNLCK: ::c_int = 2;
-+
- pub const SFD_NONBLOCK: ::c_int = 0x0800;
-
- pub const TIOCEXCL: ::c_ulong = 0x540C;
-diff --git a/src/liblibc/src/unix/notbsd/linux/other/b64/aarch64.rs b/src/liblibc/src/unix/notbsd/linux/other/b64/aarch64.rs
-index 2ba27a72b..171d904ca 100644
---- a/src/liblibc/src/unix/notbsd/linux/other/b64/aarch64.rs
-+++ b/src/liblibc/src/unix/notbsd/linux/other/b64/aarch64.rs
-@@ -340,6 +340,10 @@ pub const F_SETOWN: ::c_int = 8;
- pub const F_SETLK: ::c_int = 6;
- pub const F_SETLKW: ::c_int = 7;
-
-+pub const F_RDLCK: ::c_int = 0;
-+pub const F_WRLCK: ::c_int = 1;
-+pub const F_UNLCK: ::c_int = 2;
-+
- pub const SFD_NONBLOCK: ::c_int = 0x0800;
-
- pub const TIOCEXCL: ::c_ulong = 0x540C;
-diff --git a/src/liblibc/src/unix/notbsd/linux/other/b64/powerpc64.rs b/src/liblibc/src/unix/notbsd/linux/other/b64/powerpc64.rs
-index 9dd91f0fd..1813413b0 100644
---- a/src/liblibc/src/unix/notbsd/linux/other/b64/powerpc64.rs
-+++ b/src/liblibc/src/unix/notbsd/linux/other/b64/powerpc64.rs
-@@ -327,6 +327,10 @@ pub const F_SETOWN: ::c_int = 8;
- pub const F_SETLK: ::c_int = 6;
- pub const F_SETLKW: ::c_int = 7;
-
-+pub const F_RDLCK: ::c_int = 0;
-+pub const F_WRLCK: ::c_int = 1;
-+pub const F_UNLCK: ::c_int = 2;
-+
- pub const SFD_NONBLOCK: ::c_int = 0x0800;
-
- pub const TIOCEXCL: ::c_ulong = 0x540C;
-diff --git a/src/liblibc/src/unix/notbsd/linux/other/b64/sparc64.rs b/src/liblibc/src/unix/notbsd/linux/other/b64/sparc64.rs
-index 34438a735..a3251ec7b 100644
---- a/src/liblibc/src/unix/notbsd/linux/other/b64/sparc64.rs
-+++ b/src/liblibc/src/unix/notbsd/linux/other/b64/sparc64.rs
-@@ -305,6 +305,10 @@ pub const F_SETOWN: ::c_int = 6;
- pub const F_SETLK: ::c_int = 8;
- pub const F_SETLKW: ::c_int = 9;
-
-+pub const F_RDLCK: ::c_int = 1;
-+pub const F_WRLCK: ::c_int = 2;
-+pub const F_UNLCK: ::c_int = 3;
-+
- pub const SFD_NONBLOCK: ::c_int = 0x4000;
-
- pub const TIOCEXCL: ::c_ulong = 0x2000740d;
---
-2.18.0
-
diff --git a/user/rust/APKBUILD b/user/rust/APKBUILD
index 8534e1112..9a37b97c3 100644
--- a/user/rust/APKBUILD
+++ b/user/rust/APKBUILD
@@ -3,10 +3,10 @@
# Contributor: Jeizsm <jeizsm@gmail.com>
# Maintainer: Samuel Holland <samuel@sholland.org>
pkgname=rust
-pkgver=1.31.1
-_bootcargover=0.31.0
-_bootver=1.30.1
-_llvmver=6
+pkgver=1.33.0
+_bootcargover=0.33.0
+_bootver=1.32.0
+_llvmver=7
pkgrel=0
pkgdesc="The Rust Programming Language"
url="https://www.rust-lang.org"
@@ -50,25 +50,15 @@ source="https://static.rust-lang.org/dist/rustc-$pkgver-src.tar.xz
0004-Require-static-native-libraries-when-linking-static-.patch
0005-Remove-nostdlib-and-musl_root-from-musl-targets.patch
0006-Prefer-libgcc_eh-over-libunwind-for-musl.patch
- 0007-Add-powerpc-unknown-linux-musl-target.patch
- 0008-Fix-powerpc64-ELFv2-big-endian-struct-passing-ABI.patch
- 0009-Use-the-ELFv2-ABI-on-powerpc64-musl.patch
- 0010-Add-powerpc64-unknown-linux-musl-target.patch
- 0011-rustc_data_structures-use-libc-types-constants-in-fl.patch
- 0012-runtest-Fix-proc-macro-tests-on-musl-hosts.patch
- 0013-Fix-double_check-tests-on-big-endian-targets.patch
- 0014-test-invalid_const_promotion-Accept-SIGTRAP-as-a-val.patch
- 0015-test-linkage-visibility-Ensure-symbols-are-visible-t.patch
- 0016-x.py-Use-python3-instead-of-python.patch
- 0017-test-target-feature-gate-Only-run-on-relevant-target.patch
- 0018-test-use-extern-for-plugins-Don-t-assume-multilib.patch
- 0019-test-sysroot-crates-are-unstable-Fix-test-when-rpath.patch
- 0020-Ignore-broken-and-non-applicable-tests.patch
- 0021-Link-stage-2-tools-dynamically-to-libstd.patch
- 0022-Move-debugger-scripts-to-usr-share-rust.patch
- 0023-Add-foxkit-target-specs.patch
- 0030-liblibc-linkage.patch
- 0031-liblibc-1b130d4c349d.patch
+ 0007-runtest-Fix-proc-macro-tests-on-musl-hosts.patch
+ 0008-test-enum-debug-Correct-minimum-LLVM-version.patch
+ 0009-test-use-extern-for-plugins-Don-t-assume-multilib.patch
+ 0010-test-sysroot-crates-are-unstable-Fix-test-when-rpath.patch
+ 0011-Ignore-broken-and-non-applicable-tests.patch
+ 0012-Link-stage-2-tools-dynamically-to-libstd.patch
+ 0013-Move-debugger-scripts-to-usr-share-rust.patch
+ 0014-Add-foxkit-target-specs.patch
+ 0030-libc-linkage.patch
0040-rls-atomics.patch
"
builddir="$srcdir/rustc-$pkgver-src"
@@ -117,10 +107,10 @@ build() {
debuginfo-lines = true
debuginfo-only-std = true
debuginfo-tools = true
- use-jemalloc = false
backtrace = true
channel = "stable"
rpath = false
+ jemalloc = false
[target.$CTARGET]
cc = "$CTARGET-gcc"
cxx = "$CTARGET-g++"
@@ -132,20 +122,18 @@ build() {
LIBGIT2_SYS_USE_PKG_CONFIG=1 \
LLVM_LINK_SHARED=1 \
RUST_BACKTRACE=1 \
- ./x.py build -j ${JOBS:-2}
+ python3 x.py build -j ${JOBS:-2}
}
check() {
cd "$builddir"
RUST_BACKTRACE=1 \
- ./x.py test -j ${JOBS:-2} --no-doc --no-fail-fast \
+ python3 x.py test -j ${JOBS:-2} --no-doc --no-fail-fast \
src/test/codegen \
src/test/codegen-units \
src/test/compile-fail \
- src/test/compile-fail-fulldeps \
src/test/incremental \
- src/test/incremental-fulldeps \
src/test/mir-opt \
src/test/pretty \
src/test/run-fail \
@@ -165,7 +153,7 @@ check() {
package() {
cd "$builddir"
- DESTDIR="$pkgdir" ./x.py install
+ DESTDIR="$pkgdir" python3 x.py install
rm "$pkgdir"/usr/lib/*.so \
"$pkgdir"/usr/lib/rustlib/components \
@@ -301,33 +289,23 @@ _mv() {
mkdir -p "$dest"
mv "$@"
}
-sha512sums="d6688711452295c41468af214f036499074d82fbeedc3f03560275219c2fae910e623e38e3210176d86f78ba5533ea098da5ff21532c8ad1a5aac132bb74ee02 rustc-1.31.1-src.tar.xz
-16596561ff87c66f64f4b40602184355d3695822b9b9e6e0e929f340664718698e6f5b378c9162b48f9cb180be5fd8e583e5e51a1c57c1cd55dcc80f468cb509 cargo-0.31.0-powerpc-foxkit-linux-musl.tar.xz
-cdfc7a3d51d66338f31007daafab95570e4e465983add99db7a45f1a3e601588b19e1257fe3e7fcc6b7104ba445add2fa1cda71325a9ab6d196b4ad0dcef2fb5 rust-std-1.30.1-powerpc-foxkit-linux-musl.tar.xz
-190ed9d84513a229f389fc5d1bf0b8e22202bd3b9802330ee2844c436ae13700bdaded091498be4f2c4c65b2f6ef55c1e2db41a918747ac60e8e11e331be2c29 rustc-1.30.1-powerpc-foxkit-linux-musl.tar.xz
-acab7c5ffa73cd83cf3ae23e69d2c05ccc3ca03aa4d5a15179af76f2e75b7c5a1bc8d2969c0a9a4b5ad36a50a8cce283e62db878797be6d0da252b414de004cd 0001-Don-t-pass-CFLAGS-to-the-C-compiler.patch
-f9773277e91e91d0bd6749890b8d332a86522927d90548af02892c6d01b8fbb3b234993eec8e5b8d6a3b6f1cec310a112e274f47f70d30829ca61a0da8d45bb5 0002-Fix-LLVM-build.patch
-9e87964d41e14f70acc1961a0162bbf3ebca73f1c2b2842c3b1c8b65776466c9299d6974fc6b94a4e06547ae8ce638c28cdedf24eeb2616976f9dd727b989c13 0003-Allow-rustdoc-to-work-when-cross-compiling-on-musl.patch
-b08e247785b6b94c24e9921665c0a80a21d6802cdcfb67cf381ff8c674388156fc6d88f8206c2d1eed152f4ad3333bf4a16cdd1c3376b97500f96c437e967ecb 0004-Require-static-native-libraries-when-linking-static-.patch
-39c2f63aaf567e539ef803afa4e02d9f373df33167306dabb303044854c95bcc52a8b0fe02a2ff186d3d37fb25ac6ceee2bea25beddb0b8d0510b41dcbfb781a 0005-Remove-nostdlib-and-musl_root-from-musl-targets.patch
-80a48ba90dce672bd47dba4358e705f0585b17af6a9236887c0bf79d229fe7250b6d5cf54c2630c132721be890cf6098376aac408ac2d3feb5fd8b5ec0678446 0006-Prefer-libgcc_eh-over-libunwind-for-musl.patch
-71fc70a70ee42fed66ea7abcccffd46c02b877f716ccb366d68c8b252290366aa3d4819bdf6ce43cf8327088ff9ff0cac9444d9fab8b4d6645dfc1a018bd5285 0007-Add-powerpc-unknown-linux-musl-target.patch
-54e7b24d3da6905d02252dbd3011a16c52c1fae6922997dcccf034b5230025e05bb0d029a5fe043c186a39f8908e8e44f9a857db943f1c49d804bcb97cca71b0 0008-Fix-powerpc64-ELFv2-big-endian-struct-passing-ABI.patch
-088d509715de07000c815005fd513d4bb43c5a273dab1cec54c3617c1d342d1eff7d4d2edc48f20b80aec8b3e380f7480d7336b9c7dc4b89b78a31a88e04eb09 0009-Use-the-ELFv2-ABI-on-powerpc64-musl.patch
-f39578109d1591759aff29cc1a4127f1456c60ed05993704574a9c1e2331243baa85e3f66ba4634f57efd964b1d11a4539a3ab3a1fdab9c3163d91c658e04bf2 0010-Add-powerpc64-unknown-linux-musl-target.patch
-11ee5b81d354b36369eb5d7f57bcde04a350f1ca5e6034d04eae6c5064dad37b05a26ca44f99499f3f7664c11beba0aafbc1adb6807a21bcce878717659d04b3 0011-rustc_data_structures-use-libc-types-constants-in-fl.patch
-8ff473da3fd668ee36826e206245c7b6d20c671b4016c9a9d5e1d22af05f8d4d724543942a53ab38dd9c7bb6e4c3eff33daec4a84b0ee333b25b007f610b4b49 0012-runtest-Fix-proc-macro-tests-on-musl-hosts.patch
-1ce5797d0b9a0eb24d203a437b999ac9691cf741086645864b3a0dcf134f31c31e68e6e1618f44d3bd3c3743c067713c472e93c69f551d14f2e0233b6f10d2cd 0013-Fix-double_check-tests-on-big-endian-targets.patch
-f72c803cd531e6c85c37993214a89a8e49d776a6f50405956d6b453cc9036e596eeac4da36c564fc9388908237fe37291dfc6a0d00ed1d5f83186371f8ad6c07 0014-test-invalid_const_promotion-Accept-SIGTRAP-as-a-val.patch
-861402c0468438428aefdbcec13c82678ba280816f5482a5707805caa37ddabf8df95d894dd0ab99d3e0b09de3b0a602cb29e62afd479cedf6112950ed174f9a 0015-test-linkage-visibility-Ensure-symbols-are-visible-t.patch
-1dec944575e8bb6fc7ad910e2dae4d694a2571b36652a27a9b7ee11c40018d39d41f89bf827d909a67ff4d16d0970d1c753b24dfd5dbd0362028dcf12f60dd61 0016-x.py-Use-python3-instead-of-python.patch
-9633256006c28106d9b5e560e4cd4ee72de700a0f54d0c5fd1576760213e674f3c123b83a547bbdf9cdfde92ba6d10edf3cb598f3ad67db6e20b11d24fb49d54 0017-test-target-feature-gate-Only-run-on-relevant-target.patch
-3ab705f45cc75507c5a8fbf7c8520c7231d7d1870e5dc5da3d822e50f28a6f242ba31e712f98f72ff96143010812212cdc99f8b1b197a680bbe249dfb9779d74 0018-test-use-extern-for-plugins-Don-t-assume-multilib.patch
-572669f940c93b57b1f02161685bd3e35e2757bfd1f1d2e6ca5fd7d6e80c6733917a2c28205c4bc8741eae01bd790fe14996ba857884494a6084f4670819e43d 0019-test-sysroot-crates-are-unstable-Fix-test-when-rpath.patch
-e1cd1bc3a363ce214e887c4c61e233d388c3b75155ae4a84e8381bdccee5897ddffc992bc22dcdb6316b64516236ae4b07d06ff331e9e031c524196b0e5c40d5 0020-Ignore-broken-and-non-applicable-tests.patch
-b05ef2ddd53262e46a4a6148db25b0903347e531f94a1ec194aafd31e49b4559564edbe4842f82b457d9c700723474fa29119f57969c0208199d5f2126ce6509 0021-Link-stage-2-tools-dynamically-to-libstd.patch
-34ce4f9227a0e58113e8871db78681d1a0180f4af9de08b17c7ba15d13f47d189e2b0c66cb203ddfee8ac29131b6e5bc7ba8fd699d63847ab2a55973503788bc 0022-Move-debugger-scripts-to-usr-share-rust.patch
-6204dfc16ca27bda4c77a5f9a3805af9995e24c8b9cda6d45d0860c3625e937ec6e871a3ccec6357ab4cd52b0e73450897960102bb1e6f1ce80ae95780a8dbea 0023-Add-foxkit-target-specs.patch
-f94abe390798b968bccfba1c908ab5895220ed0d167314b790a9c28b10cac81fb2e3b08760b4e8551ad5a5d02cd052d3d0d647ad8f8634e97f4d3b0470a21c42 0030-liblibc-linkage.patch
-875f0b676c61c1ec67894fc7866233869ced8fc975960b8b99585bad370066f9a6bf591590a745c73a0bcc93b9f7fda74a703829c50d04b80846aecc92ae725a 0031-liblibc-1b130d4c349d.patch
+sha512sums="3291e4e19f75f44c81e6fcf4c01edc7c9d326eca43722381231abcf2e99f4314059ba59a29b79f5511ad9421c358c45e8fe18584d6954d17fe2aabad0f9d9147 rustc-1.33.0-src.tar.xz
+24caa7730dd41f61b97bb909d499f928cebab948e814b786e7d3836e92188ef27da48147532e063b3cec424e41a4374581274f21dc79bb95cf310f92c8609579 cargo-0.33.0-powerpc64-foxkit-linux-musl.tar.xz
+c6d237d6daaae1e8a342691917bdc76fa6a59f5bd99a1df38baebf6d5c1ddc6f8460ac6e1db8168120accb9c5f90f652ea013b4d865c486c72e90244be753ebd rust-std-1.32.0-powerpc64-foxkit-linux-musl.tar.xz
+8ff3bca961e146593f17e1581d20eaeae99768a4fac65258cf5ef80ef69b427a4980fd345b69b831028ca3e882d52d2ebd42a2eccab2c812da09e2bc28960ffe rustc-1.32.0-powerpc64-foxkit-linux-musl.tar.xz
+a5e03a091b224b9cd3cd6d8a0b1ac70389f374c95ff09f55b01e739b3a98a8551d2208b2750137ebaacbb8628d1bb54a96aaeb938c961e4eb33a071594bce1dc 0001-Don-t-pass-CFLAGS-to-the-C-compiler.patch
+bf387aad3d117e30621bd1914c3a713a710db0639413d57802c3a9bba49bf422dedf5e03b4a3073ec1efbd571cbcd10b6a0b3f98f2b019555581db5599b84f44 0002-Fix-LLVM-build.patch
+b931204afaf3ac4c73f41c29586b0cebdb06867bf401717a473c2367ebf25be1a42827f7ccccea50c15006c744cb73fc7ea9627816d0a2b53421546a4e478436 0003-Allow-rustdoc-to-work-when-cross-compiling-on-musl.patch
+da362880f89a2df8223ed255d44803e9d7d8115666946fe3a4fe4a78030cd8ee40d6aabc1ceaab110e62e9a67af7631729348f71f2d514458ef0a60e6decff3b 0004-Require-static-native-libraries-when-linking-static-.patch
+1bdc441cb068697c7f6984e16bdde28bdd17b7a2517c33b666851d55f1d987a76e4d7f034b7221538c663c0a33603d8ac1f60ffb167a4a5d01677a7b214ab21d 0005-Remove-nostdlib-and-musl_root-from-musl-targets.patch
+03580acc8391a7b32fb2a7880c6a924838803e8ce4771e465f24b7a09263b9b0ca7ae4fd84770c262cf25734d49ab0f838d57104b181224b901e26f25dc9d2e9 0006-Prefer-libgcc_eh-over-libunwind-for-musl.patch
+8e06768d0723aca0dd292942381255061b83505da89bc45c8c8c280dcaf3184e1b05b3744559fc3ed576f9a726bd593d892a71e8fd81bfebee62bb86d62f5531 0007-runtest-Fix-proc-macro-tests-on-musl-hosts.patch
+52138d4cc238ee44eafabfe281e3378298d510c9ffd9f123a5934f33b7e68b6fa262605c044bfce5943b18983d0145bdf33b9c168e030fee892247ef7ce4ec19 0008-test-enum-debug-Correct-minimum-LLVM-version.patch
+34be92c7ce327f488b7a933e7232890615b3d07c08a09028d712c01aa53f89025b47aaaccdc5c2e1d52948eb967b40ea2c576b29b0102feadc7153d6d9984f79 0009-test-use-extern-for-plugins-Don-t-assume-multilib.patch
+47b025edccb7e2d7c42353c3a8665714a3b5661a7d832a9bd71bf9bff75ffe7c6e90a9d55b765dadec54ce29c2539fa894801487718ac1053bd4528b413a487d 0010-test-sysroot-crates-are-unstable-Fix-test-when-rpath.patch
+c32c5d807ec23f5e88f47f83220a117ff3f048cec4731e50d0cae082c971ea03993a6e4f95225046a0b06b406608ee97fb85671ad8f86e5da4970bba210388d6 0011-Ignore-broken-and-non-applicable-tests.patch
+6b089c21b3b756d5d8b3198f4a914d8a2f90216ce25d491f7caeb0a637d0f729e9e5184dfa3e07b50570df5791576d1aa4bf2a1f33183c67bc2ba6ba36c0752e 0012-Link-stage-2-tools-dynamically-to-libstd.patch
+d9568a3e21b60831b193c4a9554756d0be77d0c63d06d6c3ff1c7fd35822aaa3b2e2cf8acd29fe07eb698ebf48673245e052a6ddfa4d39bb687eea8c4c236e50 0013-Move-debugger-scripts-to-usr-share-rust.patch
+f1269241ccddf4594752b97e6856dad98ca987d2491b1bd95f6123352eec52f3acb3f17dae1c2a42be55e25db4a7e0d6bb1fb2b468cc139ad14559e0753a4c87 0014-Add-foxkit-target-specs.patch
+7197503a3bf1cdd6cb5ff3350add2682cc64f7d33f7a4b270c53321401e439de5c24ddb2f7c5a76bb3488c8b766237f674fedaa9202c480f53595d7837214f0f 0030-libc-linkage.patch
41ab9d6f361ee2fba267c9836c1a382dc86101d1bfb59cd942be8e8fc526ad65d845c9395cba7f760808d345d107f32e75af9057d67c450b8fbdd8a73636fb2e 0040-rls-atomics.patch"
diff --git a/user/wireguard-module-power8-64k/APKBUILD b/user/wireguard-module-power8-64k/APKBUILD
index 0e08c993f..ed1fa1ba3 100644
--- a/user/wireguard-module-power8-64k/APKBUILD
+++ b/user/wireguard-module-power8-64k/APKBUILD
@@ -3,7 +3,7 @@
# KEEP THIS IN SYNC with the other wireguard-module packages.
_kflavour="-power8-64k"
_kver="4.14.88-mc13"
-pkgver=0.0.20190227
+pkgver=0.0.20190406
pkgrel=0
_pkgname="wireguard-module$_kflavour"
pkgname="$_pkgname-$_kver"
@@ -29,4 +29,4 @@ package() {
make -C src DEPMOD=true KERNELDIR="/usr/src/linux-$_kver$_kflavour" INSTALL_MOD_PATH="$pkgdir" module-install
}
-sha512sums="0c27353f27d7ae758cab84a02f63863681baa5eab2b64aa494be30c411b4dccc3af030b410dbfb72342fb5ea34be04f2d3b36ad0bb6a418d9f389ac34384eca3 WireGuard-0.0.20190227.tar.xz"
+sha512sums="e3035030299f5a6ae3ac0c2e35dc2d54745cda8f31185b21c6590f1ddcc9c7a83bf7b71cc71f6d48ed6dc8f2b31f3c704f8fa85685d4113f4c560f7eba7b7d5e WireGuard-0.0.20190406.tar.xz"
diff --git a/user/wireguard-module-power8/APKBUILD b/user/wireguard-module-power8/APKBUILD
index 6ea11b28c..260d22029 100644
--- a/user/wireguard-module-power8/APKBUILD
+++ b/user/wireguard-module-power8/APKBUILD
@@ -3,7 +3,7 @@
# KEEP THIS IN SYNC with the other wireguard-module packages.
_kflavour="-power8"
_kver="4.14.88-mc13"
-pkgver=0.0.20190227
+pkgver=0.0.20190406
pkgrel=0
_pkgname="wireguard-module$_kflavour"
pkgname="$_pkgname-$_kver"
@@ -29,4 +29,4 @@ package() {
make -C src DEPMOD=true KERNELDIR="/usr/src/linux-$_kver$_kflavour" INSTALL_MOD_PATH="$pkgdir" module-install
}
-sha512sums="0c27353f27d7ae758cab84a02f63863681baa5eab2b64aa494be30c411b4dccc3af030b410dbfb72342fb5ea34be04f2d3b36ad0bb6a418d9f389ac34384eca3 WireGuard-0.0.20190227.tar.xz"
+sha512sums="e3035030299f5a6ae3ac0c2e35dc2d54745cda8f31185b21c6590f1ddcc9c7a83bf7b71cc71f6d48ed6dc8f2b31f3c704f8fa85685d4113f4c560f7eba7b7d5e WireGuard-0.0.20190406.tar.xz"
diff --git a/user/wireguard-module/APKBUILD b/user/wireguard-module/APKBUILD
index b85581303..4bb25ae3e 100644
--- a/user/wireguard-module/APKBUILD
+++ b/user/wireguard-module/APKBUILD
@@ -3,7 +3,7 @@
# KEEP THIS IN SYNC with the other wireguard-module packages.
_kflavour=""
_kver="4.14.88-mc13"
-pkgver=0.0.20190227
+pkgver=0.0.20190406
pkgrel=0
_pkgname="wireguard-module$_kflavour"
pkgname="$_pkgname-$_kver"
@@ -29,4 +29,4 @@ package() {
make -C src DEPMOD=true KERNELDIR="/usr/src/linux-$_kver$_kflavour" INSTALL_MOD_PATH="$pkgdir" module-install
}
-sha512sums="0c27353f27d7ae758cab84a02f63863681baa5eab2b64aa494be30c411b4dccc3af030b410dbfb72342fb5ea34be04f2d3b36ad0bb6a418d9f389ac34384eca3 WireGuard-0.0.20190227.tar.xz"
+sha512sums="e3035030299f5a6ae3ac0c2e35dc2d54745cda8f31185b21c6590f1ddcc9c7a83bf7b71cc71f6d48ed6dc8f2b31f3c704f8fa85685d4113f4c560f7eba7b7d5e WireGuard-0.0.20190406.tar.xz"
diff --git a/user/wireguard-tools/APKBUILD b/user/wireguard-tools/APKBUILD
index c14471f7f..493da9d37 100644
--- a/user/wireguard-tools/APKBUILD
+++ b/user/wireguard-tools/APKBUILD
@@ -2,7 +2,7 @@
# Maintainer: Luis Ressel <aranea@aixah.de>
pkgname=wireguard-tools
_pkgreal=WireGuard
-pkgver=0.0.20190227
+pkgver=0.0.20190406
pkgrel=0
pkgdesc="Userland tools for the WireGuard VPN"
url="https://www.wireguard.com/"
@@ -43,4 +43,4 @@ _patch() {
}
-sha512sums="0c27353f27d7ae758cab84a02f63863681baa5eab2b64aa494be30c411b4dccc3af030b410dbfb72342fb5ea34be04f2d3b36ad0bb6a418d9f389ac34384eca3 WireGuard-0.0.20190227.tar.xz"
+sha512sums="e3035030299f5a6ae3ac0c2e35dc2d54745cda8f31185b21c6590f1ddcc9c7a83bf7b71cc71f6d48ed6dc8f2b31f3c704f8fa85685d4113f4c560f7eba7b7d5e WireGuard-0.0.20190406.tar.xz"
diff --git a/user/wpa_supplicant/APKBUILD b/user/wpa_supplicant/APKBUILD
index ba297f885..240fb474c 100644
--- a/user/wpa_supplicant/APKBUILD
+++ b/user/wpa_supplicant/APKBUILD
@@ -8,8 +8,8 @@ url="https://w1.fi/wpa_supplicant/"
arch="all"
license="BSD-3-Clause AND Public-Domain AND (GPL-2.0+ OR BSD-3-Clause)"
options="!check" # No test suite
-subpackages="$pkgname-doc $pkgname-openrc wpa_gui"
-depends="dbus"
+subpackages="$pkgname-dbus::noarch $pkgname-doc $pkgname-openrc wpa_gui"
+depends=""
makedepends="linux-headers openssl-dev dbus-dev libnl3-dev qt5-qtbase-dev"
source="http://w1.fi/releases/$pkgname-$pkgver.tar.gz
rebased-v2.6-0001-hostapd-Avoid-key-reinstallation-in-FT-handshake.patch
@@ -72,15 +72,6 @@ package() {
done
install -Dm755 eapol_test "$pkgdir"/sbin/eapol_test
- # dbus
- cd dbus
- install -d "$pkgdir"/etc/dbus-1/system.d
- install -m644 dbus-wpa_supplicant.conf \
- "$pkgdir"/etc/dbus-1/system.d/wpa_supplicant.conf
- install -d "$pkgdir"/usr/share/dbus-1/system-services
- install fi.epitest.hostap.WPASupplicant.service \
- "$pkgdir"/usr/share/dbus-1/system-services
- install -d "$pkgdir"/var/run/wpa_supplicant
install -Dm755 "$srcdir"/wpa_supplicant.initd \
"$pkgdir"/etc/init.d/wpa_supplicant
install -Dm644 "$srcdir"/wpa_supplicant.confd \
@@ -91,6 +82,20 @@ package() {
ln -s ../sbin/wpa_cli "$pkgdir"/bin/wpa_cli
}
+dbus() {
+ pkgdesc="$pkgdesc (dbus services)"
+ depends="$pkgname dbus"
+ install_if="$pkgname=$pkgver-r$pkgrel dbus"
+
+ cd "$builddir/wpa_supplicant/dbus"
+ install -d "$subpkgdir"/etc/dbus-1/system.d
+ install -m644 dbus-wpa_supplicant.conf \
+ "$subpkgdir"/etc/dbus-1/system.d/wpa_supplicant.conf
+ install -d "$subpkgdir"/usr/share/dbus-1/system-services
+ install fi.epitest.hostap.WPASupplicant.service \
+ "$subpkgdir"/usr/share/dbus-1/system-services
+}
+
wpa_gui() {
pkgdesc="Graphical User Interface for wpa_supplicant"
depends="$pkgname"