author     Rich Felker <dalias@aerifal.cx>  2015-10-15 12:04:48 -0400
committer  Rich Felker <dalias@aerifal.cx>  2015-10-15 12:04:48 -0400
commit     74483c5955a632af5d9a4783cc2b541764450551
tree       6ee843126dd7aa888d619c728dc20ccd04981d77
parent     11da520c7af5b190e69c0906c6e95000c4e59f33
mark arm thread-pointer-loading inline asm as volatile
this builds on commits a603a75a72bb469c6be4963ed1b55fabe675fe15 and 0ba35d69c0e77b225ec640d2bd112ff6d9d3b2af to ensure that a compiler cannot conclude that it's valid to reorder the asm to a point before the thread pointer is set up, or to treat the inline function as if it were declared with __attribute__((const)).

other archs already use volatile asm for thread pointer loading.
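For illustration only (not part of the commit): a minimal sketch, assuming GCC-style ARM inline asm, of the distinction the commit message describes. The helper names below are hypothetical; only the use of volatile mirrors the actual change.

/* Hypothetical helpers contrasting non-volatile and volatile asm. */

static inline char *get_tp_nonvolatile(void)
{
	char *p;
	/* No volatile: the compiler may treat this asm as a pure function of
	 * its (empty) inputs, so it can CSE the result or move the asm to a
	 * point before the thread pointer has been installed. */
	__asm__( "mrc p15,0,%0,c13,c0,3" : "=r"(p) );
	return p;
}

static inline char *get_tp_volatile(void)
{
	char *p;
	/* volatile: the asm is assumed to have side effects, so the compiler
	 * cannot conclude it is valid to eliminate it, duplicate it, or
	 * reorder it before thread-pointer setup. */
	__asm__ __volatile__( "mrc p15,0,%0,c13,c0,3" : "=r"(p) );
	return p;
}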
Diffstat (limited to 'arch')
-rw-r--r-- arch/arm/pthread_arch.h | 6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/arch/arm/pthread_arch.h b/arch/arm/pthread_arch.h
index 5cbb209e..4a4dd09e 100644
--- a/arch/arm/pthread_arch.h
+++ b/arch/arm/pthread_arch.h
@@ -4,7 +4,7 @@
static inline pthread_t __pthread_self()
{
char *p;
- __asm__( "mrc p15,0,%0,c13,c0,3" : "=r"(p) );
+ __asm__ __volatile__ ( "mrc p15,0,%0,c13,c0,3" : "=r"(p) );
return (void *)(p+8-sizeof(struct pthread));
}
@@ -14,10 +14,10 @@ static inline pthread_t __pthread_self()
{
#ifdef __clang__
char *p;
- __asm__( "bl __a_gettp\n\tmov %0,r0" : "=r"(p) : : "cc", "r0", "lr" );
+ __asm__ __volatile__ ( "bl __a_gettp\n\tmov %0,r0" : "=r"(p) : : "cc", "r0", "lr" );
#else
register char *p __asm__("r0");
- __asm__( "bl __a_gettp" : "=r"(p) : : "cc", "lr" );
+ __asm__ __volatile__ ( "bl __a_gettp" : "=r"(p) : : "cc", "lr" );
#endif
return (void *)(p+8-sizeof(struct pthread));
}