author    | Rich Felker <dalias@aerifal.cx> | 2014-07-19 23:37:21 -0400
committer | Rich Felker <dalias@aerifal.cx> | 2014-07-19 23:37:21 -0400
commit    | f61be1f875a2758509d6e9e2cf6f1d9603b28b65 (patch)
tree      | 9a79fbe9842035762a5437ca3eeb4c41a293bdfa /arch/mips
parent    | ddddec106fd17c3aca3287005d21e92f742aa9d4 (diff)
fix mips struct stat dev_t members for big endian
the mips version of this structure on the kernel side wrongly has a
32-bit type rather than a 64-bit type. fortunately there is adjacent
padding to bring it up to 64 bits, and on little endian, this allows
us to treat the adjacent kernel st_dev and st_pad0[0] as a single
64-bit dev_t. however, on big endian, such treatment results in the
upper and lower 32-bit parts of the dev_t value being swapped. for the
purpose of just comparing st_dev values this did not break anything,
but it precluded actually processing the device numbers as major/minor
values.
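
for illustration, here is a small standalone sketch (the struct and member
names below are simplified stand-ins for the kernel layout, a 32-bit st_dev
followed by a padding word, not code from this commit or from the kernel
headers): the 64-bit read already sees the right value on little endian,
while on big endian a 32-bit right shift recovers it.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* simplified stand-in for the kernel-side layout: a 32-bit device
 * number followed by padding reserved for future expansion */
struct kstat_prefix {
	uint32_t st_dev;
	uint32_t st_pad0;
};

int main(void)
{
	struct kstat_prefix k = { .st_dev = 0x0801, .st_pad0 = 0 };
	uint64_t dev;

	/* userspace reads the same eight bytes as one 64-bit dev_t */
	memcpy(&dev, &k, sizeof dev);

#if defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
	/* big endian: the real device number landed in the upper half,
	 * with the padding word below it; shift it back down */
	dev >>= 32;
#endif
	/* little endian needs nothing: the low half is already st_dev */
	printf("st_dev = %#llx\n", (unsigned long long)dev);
	return 0;
}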
since the broken kernel behavior that needs to be worked around is
isolated to one arch, I put the workarounds in syscall_arch.h rather
than adding a stat fixup path in the common code. on little endian
mips, the added code optimizes out completely.
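
a minimal sketch of that folding, using the same _MIPSEL preprocessor test
as the patch below; stat_fix_sketch and demo_caller are hypothetical names,
213 is a stand-in constant rather than the real syscall number, and the
_MIPSEL macros are only defined by mips toolchains (elsewhere this simply
picks the big endian branch).

#include <stdint.h>

#if _MIPSEL || __MIPSEL || __MIPSEL__
/* little endian: the fixup is an expression with no side effects, so any
 * call guarded by a test on the syscall number is dead code and vanishes */
#define stat_fix_sketch(p) ((p),(void)0)
#else
/* big endian: shift the misplaced upper half back down, like __stat_fix */
static inline void stat_fix_sketch(uint64_t *dev) { *dev >>= 32; }
#endif

long demo_caller(long n, uint64_t *dev)
{
	/* on little endian this whole statement compiles to nothing */
	if (n == 213) stat_fix_sketch(dev);
	return 0;
}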
the changes necessary were incompatible with the way the __asm_syscall
macro was factored, so I just removed it and flattened the individual
__syscallN functions. this arguably makes the code easier to read and
understand, anyway.
Diffstat (limited to 'arch/mips')
-rw-r--r-- | arch/mips/syscall_arch.h | 101
1 file changed, 81 insertions, 20 deletions
diff --git a/arch/mips/syscall_arch.h b/arch/mips/syscall_arch.h
index e62c3398..1aa2c812 100644
--- a/arch/mips/syscall_arch.h
+++ b/arch/mips/syscall_arch.h
@@ -7,29 +7,44 @@ long (__syscall)(long, ...);
 
 #define SYSCALL_RLIM_INFINITY (-1UL/2)
 
-#ifndef __clang__
+#if _MIPSEL || __MIPSEL || __MIPSEL__
+#define __stat_fix(st) ((st),(void)0)
+#else
+#include <sys/stat.h>
+static inline void __stat_fix(long p)
+{
+	struct stat *st = (struct stat *)p;
+	st->st_dev >>= 32;
+	st->st_rdev >>= 32;
+}
+#endif
 
-#define __asm_syscall(...) do { \
-	register long r2 __asm__("$2"); \
-	__asm__ __volatile__ ( \
-		"addu $2,$0,%2 ; syscall" \
-		: "=&r"(r2), "=r"(r7) : "ir"(n), __VA_ARGS__, "0"(r2), "1"(r7) \
-		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13", \
-		  "$14", "$15", "$24", "$25", "hi", "lo", "memory"); \
-	return r7 ? -r2 : r2; \
-	} while (0)
+#ifndef __clang__
 
 static inline long __syscall0(long n)
 {
 	register long r7 __asm__("$7");
-	__asm_syscall("i"(0));
+	register long r2 __asm__("$2");
+	__asm__ __volatile__ (
+		"addu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7)
+		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
+		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
+	return r7 ? -r2 : r2;
 }
 
 static inline long __syscall1(long n, long a)
 {
 	register long r4 __asm__("$4") = a;
 	register long r7 __asm__("$7");
-	__asm_syscall("r"(r4));
+	register long r2 __asm__("$2");
+	__asm__ __volatile__ (
+		"addu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
+		  "r"(r4)
+		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
+		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
+	return r7 ? -r2 : r2;
 }
 
 static inline long __syscall2(long n, long a, long b)
@@ -37,7 +52,16 @@ static inline long __syscall2(long n, long a, long b)
 	register long r4 __asm__("$4") = a;
 	register long r5 __asm__("$5") = b;
 	register long r7 __asm__("$7");
-	__asm_syscall("r"(r4), "r"(r5));
+	register long r2 __asm__("$2");
+	__asm__ __volatile__ (
+		"addu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
+		  "r"(r4), "r"(r5)
+		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
+		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
+	if (r7) return -r2;
+	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
+	return r2;
 }
 
 static inline long __syscall3(long n, long a, long b, long c)
@@ -46,7 +70,16 @@ static inline long __syscall3(long n, long a, long b, long c)
 	register long r5 __asm__("$5") = b;
 	register long r6 __asm__("$6") = c;
 	register long r7 __asm__("$7");
-	__asm_syscall("r"(r4), "r"(r5), "r"(r6));
+	register long r2 __asm__("$2");
+	__asm__ __volatile__ (
+		"addu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
+		  "r"(r4), "r"(r5), "r"(r6)
+		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
+		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
+	if (r7) return -r2;
+	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
+	return r2;
 }
 
 static inline long __syscall4(long n, long a, long b, long c, long d)
@@ -55,7 +88,17 @@ static inline long __syscall4(long n, long a, long b, long c, long d)
 	register long r5 __asm__("$5") = b;
 	register long r6 __asm__("$6") = c;
 	register long r7 __asm__("$7") = d;
-	__asm_syscall("r"(r4), "r"(r5), "r"(r6));
+	register long r2 __asm__("$2");
+	__asm__ __volatile__ (
+		"addu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
+		  "r"(r4), "r"(r5), "r"(r6)
+		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
+		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
+	if (r7) return -r2;
+	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
+	if (n == SYS_fstatat) __stat_fix(c);
+	return r2;
 }
 
 #else
@@ -72,27 +115,45 @@ static inline long __syscall1(long n, long a)
 
 static inline long __syscall2(long n, long a, long b)
 {
-	return (__syscall)(n, a, b);
+	long r2 = (__syscall)(n, a, b);
+	if (r2 > -4096UL) return -r2;
+	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
+	return r2;
 }
 
 static inline long __syscall3(long n, long a, long b, long c)
 {
-	return (__syscall)(n, a, b, c);
+	long r2 = (__syscall)(n, a, b, c);
+	if (r2 > -4096UL) return -r2;
+	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
+	return r2;
 }
 
 static inline long __syscall4(long n, long a, long b, long c, long d)
 {
-	return (__syscall)(n, a, b, c, d);
+	long r2 = (__syscall)(n, a, b, c, d);
+	if (r2 > -4096UL) return -r2;
+	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
+	if (n == SYS_fstatat) __stat_fix(c);
+	return r2;
 }
 
 #endif
 
 static inline long __syscall5(long n, long a, long b, long c, long d, long e)
 {
-	return (__syscall)(n, a, b, c, d, e);
+	long r2 = (__syscall)(n, a, b, c, d, e);
+	if (r2 > -4096UL) return -r2;
+	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
+	if (n == SYS_fstatat) __stat_fix(c);
+	return r2;
 }
 
 static inline long __syscall6(long n, long a, long b, long c, long d, long e, long f)
 {
-	return (__syscall)(n, a, b, c, d, e, f);
+	long r2 = (__syscall)(n, a, b, c, d, e, f);
+	if (r2 > -4096UL) return -r2;
+	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
+	if (n == SYS_fstatat) __stat_fix(c);
+	return r2;
 }