diff --git a/userspace/libraries/LibC/include/bits/pthread_types.h b/userspace/libraries/LibC/include/bits/pthread_types.h
index e101b2e1..71415929 100644
--- a/userspace/libraries/LibC/include/bits/pthread_types.h
+++ b/userspace/libraries/LibC/include/bits/pthread_types.h
@@ -70,16 +70,16 @@ __BEGIN_DECLS
 #endif
 #undef __need_pthread_rwlockattr_t
 
-#if !defined(__pthread_spinlock_t_defined) && (defined(__need_all_types) || defined(__need_pthread_spinlock_t))
-	#define __pthread_spinlock_t_defined 1
-	typedef int pthread_spinlock_t;
-#endif
-#undef __need_pthread_spinlock_t
-
 #if !defined(__pthread_t_defined) && (defined(__need_all_types) || defined(__need_pthread_t))
 	#define __pthread_t_defined 1
 	typedef pid_t pthread_t;
 #endif
 #undef __need_pthread_t
 
+#if !defined(__pthread_spinlock_t_defined) && (defined(__need_all_types) || defined(__need_pthread_spinlock_t))
+	#define __pthread_spinlock_t_defined 1
+	typedef pthread_t pthread_spinlock_t;
+#endif
+#undef __need_pthread_spinlock_t
+
 __END_DECLS
diff --git a/userspace/libraries/LibC/pthread.cpp b/userspace/libraries/LibC/pthread.cpp
index 37300001..3d0941f2 100644
--- a/userspace/libraries/LibC/pthread.cpp
+++ b/userspace/libraries/LibC/pthread.cpp
@@ -1,5 +1,8 @@
 #include <BAN/Assert.h>
+#include <BAN/Atomic.h>
+#include <BAN/PlacementNew.h>
+#include <errno.h>
 #include <pthread.h>
 #include <sys/syscall.h>
 #include <unistd.h>
 
@@ -52,3 +55,62 @@ pthread_t pthread_self(void)
 {
 	return syscall(SYS_PTHREAD_SELF);
 }
+
+static inline BAN::Atomic<pthread_t>& pthread_spin_get_atomic(pthread_spinlock_t* lock)
+{
+	static_assert(sizeof(pthread_spinlock_t) <= sizeof(BAN::Atomic<pthread_t>));
+	static_assert(alignof(pthread_spinlock_t) <= alignof(BAN::Atomic<pthread_t>));
+	return *reinterpret_cast<BAN::Atomic<pthread_t>*>(lock);
+}
+
+int pthread_spin_destroy(pthread_spinlock_t* lock)
+{
+	pthread_spin_get_atomic(lock).~Atomic();
+	return 0;
+}
+
+int pthread_spin_init(pthread_spinlock_t* lock, int pshared)
+{
+	(void)pshared;
+	new (lock) BAN::Atomic<pthread_t>();
+	pthread_spin_get_atomic(lock) = false;
+	return 0;
+}
+
+int pthread_spin_lock(pthread_spinlock_t* lock)
+{
+	auto& atomic = pthread_spin_get_atomic(lock);
+
+	const pthread_t tid = pthread_self();
+	ASSERT(atomic.load(BAN::MemoryOrder::memory_order_relaxed) != tid);
+
+	pthread_t expected = 0;
+	while (!atomic.compare_exchange(expected, tid, BAN::MemoryOrder::memory_order_acquire))
+	{
+		sched_yield();
+		expected = 0;
+	}
+
+	return 0;
+}
+
+int pthread_spin_trylock(pthread_spinlock_t* lock)
+{
+	auto& atomic = pthread_spin_get_atomic(lock);
+
+	const pthread_t tid = pthread_self();
+	ASSERT(atomic.load(BAN::MemoryOrder::memory_order_relaxed) != tid);
+
+	pthread_t expected = 0;
+	if (atomic.compare_exchange(expected, tid, BAN::MemoryOrder::memory_order_acquire))
+		return 0;
+	return EBUSY;
+}
+
+int pthread_spin_unlock(pthread_spinlock_t* lock)
+{
+	auto& atomic = pthread_spin_get_atomic(lock);
+	ASSERT(atomic.load(BAN::MemoryOrder::memory_order_relaxed) == pthread_self());
+	atomic.store(0, BAN::MemoryOrder::memory_order_release);
+	return 0;
+}