39 #ifndef OPENIMAGEIO_THREAD_H
40 #define OPENIMAGEIO_THREAD_H
50 # define WIN32_LEAN_AND_MEAN
57 #include <boost/version.hpp>
58 #if defined(__GNUC__) && (BOOST_VERSION == 104500)
61 #pragma GCC diagnostic ignored "-Wunused-variable"
64 #include <boost/thread.hpp>
65 #include <boost/thread/tss.hpp>
66 #include <boost/version.hpp>
68 #if defined(__GNUC__) && (BOOST_VERSION == 104500)
70 #pragma GCC diagnostic error "-Wunused-variable"
80 # include <tbb/atomic.h>
81 # include <tbb/spin_mutex.h>
82 # define USE_TBB_ATOMIC 1
83 # define USE_TBB_SPINLOCK 1
85 # define USE_TBB_ATOMIC 0
86 # define USE_TBB_SPINLOCK 0
90 #if defined(_MSC_VER) && !USE_TBB
93 # pragma intrinsic (_InterlockedExchangeAdd)
94 # pragma intrinsic (_InterlockedCompareExchange)
95 # pragma intrinsic (_InterlockedCompareExchange64)
96 # pragma intrinsic (_ReadWriteBarrier)
98 # pragma intrinsic(_InterlockedExchangeAdd64)
// NOTE(review): this file appears to be a line-numbered, partially elided
// paste of OpenImageIO's thread.h.  Each surviving line still carries its
// old source line number, and many in-between lines are missing; comments
// annotate only what is visible.
//
// Pre-Vista fallback (_WIN32_WINNT <= 0x0501): the OS does not provide
// InterlockedExchangeAdd64, so it is emulated with a CAS retry loop built
// on the _InterlockedCompareExchange64 intrinsic.  The function's return
// type, opening brace, and the do-loop header are among the missing lines.
101 # if defined(_WIN32_WINNT) && _WIN32_WINNT <= 0x0501
103 InterlockedExchangeAdd64 (
volatile long long *Addend,
long long Value)
// Closing of the do { Old = *Addend; } body, then retry until the CAS
// observes that *Addend still equals Old (i.e. nobody raced us).
108 }
while (_InterlockedCompareExchange64(Addend, Old + Value, Old) != Old);
// Feature detection: use GCC's __sync_* atomic builtins when compiling with
// GCC >= 4.0.1 (or when libstdc++ advertises _GLIBCXX_ATOMIC_BUILTINS),
// except on FreeBSD and 32-bit PowerPC, where the builtins were unreliable
// (x86-64 is re-allowed explicitly).  The matching #endif lines are missing
// from this view.
114 #if defined(__GNUC__) && (defined(_GLIBCXX_ATOMIC_BUILTINS) || (__GNUC__ * 100 + __GNUC_MINOR__ >= 401))
115 #if !defined(__FreeBSD__) && !defined(__powerpc__) || defined(__x86_64__)
116 #define USE_GCC_ATOMICS
// Deliberate no-ops: shared (reader) lock/unlock that do nothing.
// NOTE(review): the enclosing class header is not visible here — presumably
// a null_mutex used when threading is disabled; confirm against the full file.
131 void lock_shared () { }
132 void unlock_shared () { }
// Degenerate "thread-specific" pointer for single-threaded builds: just a
// plain pointer plus an optional destructor callback, mimicking the
// boost::thread_specific_ptr interface.  NOTE(review): the template header
// declaring T and the access specifiers are among the missing lines, as is
// the body of reset() (which presumably runs m_dest on the old pointer —
// confirm against the full file).
147 class null_thread_specific_ptr {
// Type of the cleanup callback invoked on the held pointer.
149 typedef void (*destructor_t)(T *);
// Start empty; remember the (possibly NULL) destructor for later.
150 null_thread_specific_ptr (destructor_t dest=NULL)
151 : m_ptr(NULL), m_dest(dest) { }
// Destruction releases any held pointer via reset(NULL).
152 ~null_thread_specific_ptr () { reset (NULL); }
// Return the held pointer without taking ownership.
153 T *
get () {
return m_ptr; }
// Replace the held pointer (body lines missing from this view).
154 void reset (T *newptr=NULL) {
// A thread_specific_ptr whose visible portion is byte-identical to the
// null_thread_specific_ptr above.  NOTE(review): this is likely the
// no-real-TLS fallback branch of an #if that is missing from this view —
// a genuinely thread-local implementation would need per-thread storage,
// which a single m_ptr member cannot provide; confirm against the full file.
177 class thread_specific_ptr {
// Cleanup callback type for the held pointer.
179 typedef void (*destructor_t)(T *);
// Start empty, remembering the optional destructor.
180 thread_specific_ptr (destructor_t dest=NULL)
181 : m_ptr(NULL), m_dest(dest) { }
// Release any held pointer on destruction.
182 ~thread_specific_ptr () { reset (NULL); }
// Non-owning accessor for the held pointer.
183 T *
get () {
return m_ptr; }
// Replace the held pointer (body lines missing from this view).
184 void reset (T *newptr=NULL) {
// Two alternative bindings for the project's mutex vocabulary types; the
// #if/#else that selects between them (threads disabled vs boost-backed)
// is missing from this view.
//
// Branch 1: threading disabled — everything is a do-nothing null type.
199 typedef null_mutex mutex;
200 typedef null_mutex recursive_mutex;
201 typedef null_lock<mutex> lock_guard;
202 typedef null_lock<recursive_mutex> recursive_lock_guard;
// Branch 2: real threading — delegate to boost.thread primitives.
209 typedef boost::mutex mutex;
210 typedef boost::recursive_mutex recursive_mutex;
211 typedef boost::lock_guard< boost::mutex > lock_guard;
212 typedef boost::lock_guard< boost::recursive_mutex > recursive_lock_guard;
213 using boost::thread_specific_ptr;
// Atomic fetch-and-add on a 32-bit int: atomically performs *at += x and
// returns the PRIOR value of *at.  Per-platform dispatch; the return type,
// braces, the #elif for the TBB branch, and the #endif are missing lines.
222 atomic_exchange_and_add (
volatile int *at,
int x)
// GCC builtin path: __sync_fetch_and_add returns the old value.
224 #ifdef USE_GCC_ATOMICS
225 return __sync_fetch_and_add ((
int *)at, x);
// TBB path (its #elif line is missing): reinterpret the raw int as a
// tbb-style atomic<int> and use its fetch_and_add.
227 atomic<int> *a = (atomic<int> *)at;
228 return a->fetch_and_add (x);
// MSVC path: _InterlockedExchangeAdd also returns the old value.
229 #elif defined(_MSC_VER)
231 return _InterlockedExchangeAdd ((
volatile LONG *)at, x);
// PowerPC fallback branch (its body lines are missing from this view).
232 #elif defined (__powerpc__)
238 # error No atomics on this platform.
// Atomic fetch-and-add on a 64-bit long long: atomically *at += x, returning
// the prior value.  Mirrors the 32-bit overload above; same missing lines
// (return type, braces, TBB #elif, #endif).
245 atomic_exchange_and_add (
volatile long long *at,
long long x)
246? // (visible lines resume below)
247 #ifdef USE_GCC_ATOMICS
248 return __sync_fetch_and_add (at, x);
// TBB path (its #elif line is missing).
250 atomic<long long> *a = (atomic<long long> *)at;
251 return a->fetch_and_add (x);
252 #elif defined(_MSC_VER)
// Two MSVC sub-branches (the #if separating them is missing): the compiler
// intrinsic _InterlockedExchangeAdd64 where available (x64), otherwise the
// API/shim spelling InterlockedExchangeAdd64 (see the pre-Vista emulation
// near the top of the file).
255 return _InterlockedExchangeAdd64 ((
volatile LONGLONG *)at, x);
257 return InterlockedExchangeAdd64 ((
volatile LONGLONG *)at, x);
259 #elif defined (__powerpc__)
265 # error No atomics on this platform.
// Atomic compare-and-swap on a 32-bit int: if *at == compareval, set *at to
// newval; returns true iff the swap happened.  Per-platform dispatch with
// the return type, braces, TBB #elif and #endif lines missing.
278 atomic_compare_and_exchange (
volatile int *at,
int compareval,
int newval)
280 #ifdef USE_GCC_ATOMICS
281 return __sync_bool_compare_and_swap (at, compareval, newval);
// TBB path (its #elif line is missing).
// NOTE(review): tbb::atomic::compare_and_swap returns the PRIOR value, so a
// success test would normally be "== compareval"; comparing against newval
// only coincides with success when newval == compareval or the swap
// happened.  Looks like a latent bug — verify against TBB's documentation.
283 atomic<int> *a = (atomic<int> *)at;
284 return a->compare_and_swap (newval, compareval) == newval;
285 #elif defined(_MSC_VER)
286 return (_InterlockedCompareExchange ((
volatile LONG *)at, newval, compareval) == compareval);
// NOTE(review): this PowerPC fallback is NOT atomic (plain read-test-write);
// presumably a known-unsafe stopgap — confirm intent in the full file.
287 #elif defined(__powerpc__)
288 return ((*at == compareval) ? (*at = newval), 1 : 0);
290 # error No atomics on this platform.
// Atomic compare-and-swap on a 64-bit long long; mirrors the 32-bit
// overload above (return type, braces, TBB #elif and #endif lines missing).
297 atomic_compare_and_exchange (
volatile long long *at,
long long compareval,
long long newval)
299 #ifdef USE_GCC_ATOMICS
300 return __sync_bool_compare_and_swap (at, compareval, newval);
// TBB path (its #elif line is missing).
// NOTE(review): as with the int overload, compare_and_swap returns the
// prior value, so "== newval" looks like the wrong success test — verify.
302 atomic<long long> *a = (atomic<long long> *)at;
303 return a->compare_and_swap (newval, compareval) == newval;
304 #elif defined(_MSC_VER)
305 return (_InterlockedCompareExchange64 ((
volatile LONGLONG *)at, newval, compareval) == compareval);
// NOTE(review): non-atomic fallback, and this branch tests __PPC__ while
// the int overload tests __powerpc__ — the two spellings are not always
// both defined; worth unifying in the real file.
306 #elif defined(__PPC__)
307 return ((*at == compareval) ? (*at = newval), 1 : 0);
309 # error No atomics on this platform.
// Platform dispatch for a yield-the-CPU helper.  The actual yield calls on
// the GCC and MSVC branches (and the enclosing function signature) are
// missing from this view — only the selector skeleton survives.
320 #if defined(__GNUC__)
322 #elif defined(_MSC_VER)
325 # error No yield on this platform.
// Spin-wait pause: burn `delay` iterations as cheaply/politely as the CPU
// allows.  The enclosing function signature is missing from this view.
// x86/x86-64 GCC: the "pause" instruction hints the CPU that this is a
// spin-wait loop (reduces power and helps hyperthread siblings).
335 #if defined(__GNUC__) && (defined(__x86_64__) || defined(__i386__))
336 for (
int i = 0; i < delay; ++i)
337 __asm__ __volatile__(
"pause;");
// ARM / s390 GCC: no pause instruction, issue NOPs instead.
339 #elif defined(__GNUC__) && (defined(__arm__) || defined(__s390__))
340 for (
int i = 0; i < delay; ++i)
341 __asm__ __volatile__(
"NOP;");
// MSVC branch: loop body lines are missing from this view (presumably a
// _mm_pause()/__nop() per iteration — confirm against the full file).
346 #elif defined(_MSC_VER)
347 for (
int i = 0; i < delay; ++i) {
// Generic fallback: a plain empty counting loop.
357 for (
int i = 0; i < delay; ++i) ;
// Helper for spin loops: tracks a backoff count that starts at 1.  The rest
// of the class (presumably a pause(m_count)-then-grow operator, in the style
// of tbb::atomic_backoff) is missing from this view — confirm before relying
// on its semantics.
364 class atomic_backoff {
366 atomic_backoff () : m_count(1) { }
// Interior of a hand-rolled atomic<T> built on the free functions
// atomic_exchange_and_add / atomic_compare_and_exchange above.  The
// template/class header, access specifiers, and several member bodies
// (operator=, the retry loop around the CAS at old line 418) are missing
// from this view.
//
// Construct with an initial value (default 0).
396 atomic (T val=0) : m_val(val) { }
// Atomic read, spelled as fetch-and-add of 0 (full-strength load).
402 T operator() ()
const {
return atomic_exchange_and_add (&m_val, 0); }
// Implicit conversion: same atomic read.
406 operator T()
const {
return atomic_exchange_and_add (&m_val, 0); }
// Unsynchronized read — cheap, but may observe a stale value.
410 T fast_value ()
const {
return m_val; }
// Fragment of a CAS-based mutator (its enclosing loop/body is missing).
418 if (atomic_compare_and_exchange (&m_val, result, x))
// Pre-increment: fetch-and-add returns the old value, so add 1 back.
426 T operator++ () {
return atomic_exchange_and_add (&m_val, 1) + 1; }
// Post-increment: old value is exactly what we must return.
430 T operator++ (
int) {
return atomic_exchange_and_add (&m_val, 1); }
434 T operator-- () {
return atomic_exchange_and_add (&m_val, -1) - 1; }
438 T operator-- (
int) {
return atomic_exchange_and_add (&m_val, -1); }
// += / -= return the NEW value, like the built-in operators.
442 T operator+= (T x) {
return atomic_exchange_and_add (&m_val, x) + x; }
446 T operator-= (T x) {
return atomic_exchange_and_add (&m_val, -x) - x; }
// True iff m_val was compareval and has been swapped to newval.
448 bool bool_compare_and_swap (T compareval, T newval) {
449 return atomic_compare_and_exchange (&m_val, compareval, newval);
// Assignment from another atomic (body lines missing from this view).
452 T operator= (
const atomic &x) {
// The raw value; volatile + mutable so const readers can use the
// fetch-and-add-based load above.
462 volatile mutable T m_val;
// Copy construction disabled (declared, never defined).
465 atomic (atomic
const &);
// 8-byte alignment so the 64-bit instantiation can use 64-bit atomics.
467 } __attribute__((aligned(8)));
// Alternative bindings for atomic_int/atomic_ll and spin_mutex/spin_lock;
// the #if lines selecting among them are mostly missing from this view.
//
// Threads-disabled branch: plain ints are "atomic enough".
479 typedef int atomic_int;
480 typedef long long atomic_ll;
// Otherwise: the atomic<T> wrapper defined above (TBB's or the hand-rolled
// one, depending on configuration).
484 typedef atomic<int> atomic_int;
485 typedef atomic<long long> atomic_ll;
// Spin locks, branch 1: threading disabled — null types.
493 typedef null_mutex spin_mutex;
494 typedef null_lock<spin_mutex> spin_lock;
// Branch 2: delegate to TBB's spin mutex.
496 #elif USE_TBB_SPINLOCK
499 typedef tbb::spin_mutex spin_mutex;
500 typedef tbb::spin_mutex::scoped_lock spin_lock;
// Interior of a hand-rolled spin_mutex (the class header and parts of
// lock()/unlock()/try_lock() are missing from this view).  m_locked is 0
// when free, 1 when held.
//
// Start unlocked.
532 spin_mutex (
void) { m_locked = 0; }
534 ~spin_mutex (
void) { }
// Copying a mutex yields a fresh, unlocked mutex — lock state is never
// shared between copies.
538 spin_mutex (
const spin_mutex &) { m_locked = 0; }
// Assignment likewise does not transfer lock state.
542 const spin_mutex& operator= (
const spin_mutex&) {
return *
this; }
// lock() fragment: spin with exponential backoff until try_lock succeeds
// (the loop body lines are missing from this view).
550 atomic_backoff backoff;
555 while (! OIIO_UNLIKELY(try_lock())) {
// unlock() fragment, per-platform:
// x86/x86-64 GCC: a compiler barrier only; presumably the store to
// m_locked that follows is on a missing line — confirm against the full
// file (x86 stores have release semantics).
571 #if defined(__GNUC__) && (defined(__x86_64__) || defined(__i386__))
573 __asm__ __volatile__(
"": : :
"memory");
// MSVC branch: release by storing 0 through the atomic_int wrapper.
579 #elif defined(_MSC_VER)
585 *(atomic_int *)&m_locked = 0;
// try_lock() fragment, per-platform (true iff we transitioned 0 -> 1):
// Presumably the TBB branch — compare_and_swap returns the prior value,
// so == 0 means we acquired it.  Confirm which #if guards this line.
594 return (*(atomic_int *)&m_locked).compare_and_swap (0, 1) == 0;
595 #elif defined(__GNUC__)
// __sync_lock_test_and_set returns the previous value (acquire barrier).
598 return __sync_lock_test_and_set (&m_locked, 1) == 0;
// Generic fallback: CAS 0 -> 1.
601 return atomic_compare_and_exchange (&m_locked, 0, 1);
// Scoped RAII guard: lock on construction, unlock on destruction.
609 lock_guard (spin_mutex &fm) : m_fm(fm) { m_fm.lock(); }
610 ~lock_guard () { m_fm.unlock(); }
// Non-copyable (declared, never defined).
613 lock_guard(
const lock_guard& other);
614 lock_guard& operator = (
const lock_guard& other);
// The lock word: 0 = free, 1 = held.
619 volatile int m_locked;
// Convenience alias used throughout the library.
623 typedef spin_mutex::lock_guard spin_lock;
// Reader/writer spin lock built from a spin_mutex plus a reader count.
// Many member bodies (read_lock, write_lock, the m_locked spin_mutex member
// itself) are missing from this view.
634 class spin_rw_mutex {
// Start with no readers.
638 spin_rw_mutex (
void) { m_readers = 0; }
640 ~spin_rw_mutex (
void) { }
// Copy/assign never share lock state (same convention as spin_mutex).
644 spin_rw_mutex (
const spin_rw_mutex &) { m_readers = 0; }
648 const spin_rw_mutex& operator= (
const spin_rw_mutex&) {
return *
this; }
// read_unlock: body lines missing (presumably decrements m_readers).
663 void read_unlock () {
// write_lock fragment: after taking the writer mutex (missing lines),
// spin until all in-flight readers drain.
675 while (*(
volatile int *)&m_readers > 0)
// write_unlock: body lines missing (presumably releases the spin_mutex).
681 void write_unlock () {
// RAII guard for shared (reader) access.
688 class read_lock_guard {
690 read_lock_guard (spin_rw_mutex &fm) : m_fm(fm) { m_fm.read_lock(); }
691 ~read_lock_guard () { m_fm.read_unlock(); }
// Non-copyable (declared, never defined).
694 read_lock_guard(
const read_lock_guard& other);
695 read_lock_guard& operator = (
const read_lock_guard& other);
696 spin_rw_mutex & m_fm;
// RAII guard for exclusive (writer) access.
701 class write_lock_guard {
703 write_lock_guard (spin_rw_mutex &fm) : m_fm(fm) { m_fm.write_lock(); }
704 ~write_lock_guard () { m_fm.write_unlock(); }
707 write_lock_guard(
const write_lock_guard& other);
708 write_lock_guard& operator = (
const write_lock_guard& other);
709 spin_rw_mutex & m_fm;
// Padding sized to a cache line minus the preceding member — presumably to
// keep the writer mutex and the reader count on separate cache lines and
// avoid false sharing (the spin_mutex member itself is on a missing line).
715 char pad1_[OIIO_CACHE_LINE_SIZE-
sizeof(spin_mutex)];
// Count of readers currently holding the lock.
717 atomic_int m_readers;
718 char pad2_[OIIO_CACHE_LINE_SIZE-
sizeof(atomic_int)];
// Convenience aliases for the two guard flavors.
722 typedef spin_rw_mutex::read_lock_guard spin_rw_read_lock;
723 typedef spin_rw_mutex::write_lock_guard spin_rw_write_lock;
729 #endif // OPENIMAGEIO_THREAD_H