author	Rich Felker <dalias@aerifal.cx>	2012-04-24 06:36:50 -0400
committer	Rich Felker <dalias@aerifal.cx>	2012-04-24 06:36:50 -0400
commit	f34d0ea511e552851c8c6148fb113816f41e6759
tree	51b1dbdf2058697ab51782b96c6317f2c6051d5a /src
parent	1b0ce9af6d2aa7b92edaf3e9c631cb635bae22bd
new internal locking primitive; drop spinlocks
we use priority inheritance futexes if possible so that the library cannot hit internal priority inversion deadlocks in the presence of realtime priority scheduling (full support to be added later).
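To make the commit message concrete, here is a minimal standalone sketch (an illustration only, not musl code and not part of this commit) of what a priority-inheritance futex does: the lock word holds the owner's kernel TID, and FUTEX_LOCK_PI/FUTEX_UNLOCK_PI let the kernel lend a blocked waiter's priority to the current owner, which is what rules out the priority-inversion deadlocks described above.

#define _GNU_SOURCE
#include <stdio.h>
#include <linux/futex.h>
#include <sys/syscall.h>
#include <unistd.h>

static volatile int lockword;	/* 0 = unlocked, otherwise owner TID */

int main(void)
{
	int tid = syscall(SYS_gettid);

	/* acquire: the kernel atomically stores our TID in the lock word, or
	 * queues us with priority inheritance if another task already owns it */
	if (syscall(SYS_futex, &lockword, FUTEX_LOCK_PI, 0, NULL, NULL, 0))
		perror("FUTEX_LOCK_PI");
	printf("after acquire: lock word = %d, tid = %d\n", lockword, tid);

	/* release: the kernel checks that we own the lock, clears the word,
	 * and wakes/deboosts waiters as needed */
	if (syscall(SYS_futex, &lockword, FUTEX_UNLOCK_PI, 0, NULL, NULL, 0))
		perror("FUTEX_UNLOCK_PI");
	printf("after release: lock word = %d\n", lockword);
	return 0;
}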
Diffstat (limited to 'src')
-rw-r--r--	src/internal/libc.h	3
-rw-r--r--	src/thread/__lock.c	33
2 files changed, 29 insertions(+), 7 deletions(-)
diff --git a/src/internal/libc.h b/src/internal/libc.h
index 39a18658..d6797f90 100644
--- a/src/internal/libc.h
+++ b/src/internal/libc.h
@@ -45,10 +45,11 @@ extern struct __libc *__libc_loc(void) __attribute__((const));
 
 /* Designed to avoid any overhead in non-threaded processes */
 void __lock(volatile int *);
+void __unlock(volatile int *);
 int __lockfile(FILE *);
 void __unlockfile(FILE *);
 #define LOCK(x) (libc.threads_minus_1 ? (__lock(x),1) : ((void)(x),1))
-#define UNLOCK(x) (*(volatile int *)(x)=0)
+#define UNLOCK(x) (libc.threads_minus_1 ? (__unlock(x),1) : ((void)(x),1))
 
 void __synccall(void (*)(void *), void *);
 void __synccall_wait(void);
diff --git a/src/thread/__lock.c b/src/thread/__lock.c
index d1717956..d1734096 100644
--- a/src/thread/__lock.c
+++ b/src/thread/__lock.c
@@ -1,11 +1,32 @@
#include "pthread_impl.h"
+void __lock_2(volatile int *l)
+{
+ if (!__syscall(SYS_futex, l, FUTEX_LOCK_PI, 0, 0))
+ return;
+ int old, tid = __pthread_self()->tid;
+ while ((old = a_cas(l, 0, tid))) {
+ a_cas(l, old, old|INT_MIN);
+ __syscall(SYS_futex, l, FUTEX_WAIT, old|INT_MIN, 0);
+ }
+}
+
void __lock(volatile int *l)
{
- int spins=10000;
- /* Do not use futexes because we insist that unlocking is a simple
- * assignment to optimize non-pathological code with no contention. */
- while (a_swap(l, 1))
- if (spins) spins--, a_spin();
- else __syscall(SYS_sched_yield);
+ if (a_cas(l, 0, __pthread_self()->tid)) __lock_2(l);
+}
+
+void __unlock_2(volatile int *l)
+{
+ if (__syscall(SYS_futex, l, FUTEX_UNLOCK_PI)) {
+ *l = 0;
+ __syscall(SYS_futex, l, FUTEX_WAKE, 1);
+ }
+}
+
+void __unlock(volatile int *l)
+{
+ int old = *l;
+ if (!(old & INT_MIN) && a_cas(l, old, 0)==old) return;
+ __unlock_2(l);
}
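For readers tracing the fallback path above, the following is a standalone re-creation of the same lock-word protocol (a sketch under assumptions, not musl source: GCC __atomic builtins stand in for musl's a_cas, and the futex/cas/demo_* names are invented here). The word is 0 when free, the owner's TID while held without contention, and TID|INT_MIN once a waiter has announced itself, which is why the common-case unlock stays a single compare-and-swap with no syscall.

#define _GNU_SOURCE
#include <limits.h>
#include <linux/futex.h>
#include <sys/syscall.h>
#include <unistd.h>

static long futex(volatile int *uaddr, int op, int val)
{
	return syscall(SYS_futex, uaddr, op, val, NULL, NULL, 0);
}

/* mimics musl's a_cas: returns the value found in memory */
static int cas(volatile int *p, int old, int new)
{
	__atomic_compare_exchange_n(p, &old, new, 0,
		__ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
	return old;
}

static void demo_lock(volatile int *l, int tid)
{
	int old;
	while ((old = cas(l, 0, tid))) {
		cas(l, old, old|INT_MIN);            /* flag "waiters present" */
		futex(l, FUTEX_WAIT, old|INT_MIN);   /* sleep until woken or the word changes */
	}
}

static void demo_unlock(volatile int *l)
{
	int old = *l;
	/* fast path: no waiter flag, so releasing is one CAS back to 0 */
	if (!(old & INT_MIN) && cas(l, old, 0) == old) return;
	*l = 0;
	futex(l, FUTEX_WAKE, 1);                     /* slow path: wake one waiter */
}

int main(void)
{
	static volatile int l;
	int tid = syscall(SYS_gettid);
	demo_lock(&l, tid);      /* uncontended: a single CAS, no syscall */
	demo_unlock(&l);
	return 0;
}

In musl itself the PI operations are tried first (FUTEX_LOCK_PI in __lock_2, FUTEX_UNLOCK_PI in __unlock_2); the plain FUTEX_WAIT/FUTEX_WAKE path sketched here is only the fallback when those fail.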