summaryrefslogtreecommitdiff
path: root/system/libarchive/disable-unaligned-access-arm32-xxhash.patch
diff options
context:
space:
mode:
Diffstat (limited to 'system/libarchive/disable-unaligned-access-arm32-xxhash.patch')
-rw-r--r--  system/libarchive/disable-unaligned-access-arm32-xxhash.patch  12
1 file changed, 12 insertions, 0 deletions
diff --git a/system/libarchive/disable-unaligned-access-arm32-xxhash.patch b/system/libarchive/disable-unaligned-access-arm32-xxhash.patch
new file mode 100644
index 000000000..0b24a8854
--- /dev/null
+++ b/system/libarchive/disable-unaligned-access-arm32-xxhash.patch
@@ -0,0 +1,12 @@
+diff -ur a/libarchive/xxhash.c b/libarchive/xxhash.c
+--- a/libarchive/xxhash.c 2022-05-20 18:47:43.907673368 +0000
++++ b/libarchive/xxhash.c 2022-05-20 18:49:43.019255133 +0000
+@@ -46,7 +46,7 @@
+ ** If you know your target CPU supports unaligned memory access, you want to force this option manually to improve performance.
+ ** You can also enable this parameter if you know your input data will always be aligned (boundaries of 4, for U32).
+ */
+-#if defined(__ARM_FEATURE_UNALIGNED) || defined(__i386) || defined(_M_IX86) || defined(__x86_64__) || defined(_M_X64)
++#if (defined(__ARM_FEATURE_UNALIGNED) && defined(__aarch64__)) || defined(__i386) || defined(_M_IX86) || defined(__x86_64__) || defined(_M_X64)
+ # define XXH_USE_UNALIGNED_ACCESS 1
+ #endif
+