19 #ifndef INC_KY_Kernel_Atomic_H
20 #define INC_KY_Kernel_Atomic_H
25 #if defined(KY_OS_WIN32)
27 #elif defined(KY_OS_XBOX360)
30 #elif defined(KY_OS_PS3)
31 #include <sys/synchronization.h>
34 #elif defined(KY_OS_WII)
35 #include <revolution/os.h>
37 #elif defined(KY_OS_PSVITA)
41 #elif defined(KY_OS_3DS)
42 #include <nn/os/os_CriticalSection.h>
44 #elif defined(KY_OS_WIIU)
// Forward declarations of the atomic wrapper templates defined later in this
// header.  AtomicOps is the type-safe facade selected by sizeof(C).
57 template<
class C>
class AtomicOps;
// Integer wrapper exposing atomic arithmetic and bitwise operators.
58 template<
class T>
class AtomicInt;
// Pointer wrapper exposing atomic pointer arithmetic.
59 template<
class T>
class AtomicPtr;
// AtomicOpsRawBase: per-platform memory-barrier helper types.
// FullSync / AcquireSync / ReleaseSync are RAII guards: the constructor and
// destructor emit whatever fence instructions the target needs around the
// raw (unfenced) atomic primitive they bracket.
101 struct AtomicOpsRawBase
// Strongly-ordered or single-threaded targets: no explicit fences needed,
// so all three guards are empty.
103 #if !defined(KY_ENABLE_THREADS) || defined(KY_CPU_X86) || defined(KY_OS_WIN32) || defined(KY_OS_XBOX) || defined(KY_OS_IPHONE) || defined(KY_OS_3DS)
105 struct FullSync {
inline FullSync() { } };
106 struct AcquireSync {
inline AcquireSync() { } };
107 struct ReleaseSync {
inline ReleaseSync() { } };
// Xbox 360 (PowerPC): __lwsync after the op for acquire, before it for
// release, and on both sides for a full barrier.
109 #elif defined(KY_OS_XBOX360)
115 struct AcquireSync {
inline AcquireSync() { } ~AcquireSync() { __lwsync(); } };
116 struct ReleaseSync {
inline ReleaseSync() { __lwsync(); } ~ReleaseSync() { } };
118 struct FullSync {
inline FullSync() { __lwsync(); } ~FullSync() { __lwsync(); } };
// Generic PowerPC: "sync" on the release side (before), "isync" on the
// acquire side (after).
120 #elif defined(KY_CPU_PPC64) || defined(KY_CPU_PPC)
121 struct FullSync {
inline FullSync() {
asm volatile(
"sync\n"); } ~FullSync() {
asm volatile(
"isync\n"); } };
122 struct AcquireSync {
inline AcquireSync() { } ~AcquireSync() {
asm volatile(
"isync\n"); } };
123 struct ReleaseSync {
inline ReleaseSync() {
asm volatile(
"sync\n"); } };
// MIPS: "sync" is the only barrier used, on the relevant side(s).
125 #elif defined(KY_CPU_MIPS)
126 struct FullSync {
inline FullSync() {
asm volatile(
"sync\n"); } ~FullSync() {
asm volatile(
"sync\n"); } };
127 struct AcquireSync {
inline AcquireSync() { } ~AcquireSync() {
asm volatile(
"sync\n"); } };
128 struct ReleaseSync {
inline ReleaseSync() {
asm volatile(
"sync\n"); } };
// ARM: "dmb" data memory barrier on the relevant side(s).
130 #elif defined(KY_CPU_ARM)
131 struct FullSync {
inline FullSync() {
asm volatile(
"dmb\n"); } ~FullSync() {
asm volatile(
"dmb\n"); } };
132 struct AcquireSync {
inline AcquireSync() { } ~AcquireSync() {
asm volatile(
"dmb\n"); } };
133 struct ReleaseSync {
inline ReleaseSync() {
asm volatile(
"dmb\n"); } };
// Other GCC (>= 4) / Clang targets: the __sync builtins used below are
// already fully fenced, so the guards are empty.
// NOTE(review): the closing brace/#endif of this struct is not visible in
// this extract.
136 #elif (defined(KY_CC_GNU) && (__GNUC__ >= 4)) || defined(KY_CC_CLANG)
138 struct FullSync {
inline FullSync() { } };
139 struct AcquireSync {
inline AcquireSync() { } };
140 struct ReleaseSync {
inline ReleaseSync() { } };
// AtomicOpsRaw_4ByteImpl: raw 32-bit atomic primitives, one implementation
// selected per platform/compiler below.  Only the *_NoSync variants live
// here; fencing is layered on by AtomicOpsRaw_DefImpl.
146 struct AtomicOpsRaw_4ByteImpl :
public AtomicOpsRawBase
// Threads disabled: no raw primitives needed (plain fallbacks are provided
// by AtomicOpsRaw_DefImpl).
148 #if !defined(KY_ENABLE_THREADS)
// Win32 family: Interlocked* API.
155 #elif defined(KY_OS_WIN32) || defined(KY_OS_WINCE) || defined(KY_OS_XBOX) || defined(KY_OS_XBOX360)
// Pre-VC7 gets non-volatile pointer typedefs — presumably because older SDK
// headers declared the Interlocked* functions without volatile; confirm.
160 #if defined(KY_CC_MSVC) && (KY_CC_MSVC < 1300)
161 typedef T* InterlockTPtr;
163 typedef ET* InterlockETPtr;
165 typedef volatile T* InterlockTPtr;
167 typedef InterlockTPtr InterlockETPtr;
169 inline static T Exchange_NoSync(
volatile T* p, T val) {
return InterlockedExchange((InterlockTPtr)p, val); }
170 inline static T ExchangeAdd_NoSync(
volatile T* p, T val) {
return InterlockedExchangeAdd((InterlockTPtr)p, val); }
171 inline static bool CompareAndSet_NoSync(
volatile T* p, T c, T val) {
return InterlockedCompareExchange((InterlockETPtr)p, (ET)val, (ET)c) == (ET)c; }
// PowerPC: a first group of declarations whose asm bodies are truncated in
// this extract, then the SNC builtin path, then complete GCC-style
// lwarx/stwcx. (load-and-reserve / store-conditional) retry loops.
173 #elif defined(KY_CPU_PPC64) || defined(KY_CPU_PPC)
176 static inline UInt32 Exchange_NoSync(
volatile register UInt32 *i,
register UInt32 j)
188 static inline UInt32 ExchangeAdd_NoSync(
volatile register UInt32 *i,
register UInt32 j)
190 register UInt32 tmp, ret;
202 static inline bool CompareAndSet_NoSync(
volatile register UInt32 *i,
register UInt32 c,
register UInt32 value)
// CR0[EQ] (mask 0x20000000 of the saved condition register) is set when the
// store-conditional succeeded, i.e. the swap took place.
216 return (ret & 0x20000000) ? 1 : 0;
// PS3 SNC compiler: cell atomic builtins.
218 #elif defined(KY_CC_SNC)
219 static inline UInt32 Exchange_NoSync(
volatile register UInt32 *i,
register UInt32 j)
// NOTE(review): presumably returns the previous value of *i — confirm
// against the SNC builtin documentation.
221 return __builtin_cellAtomicStore32((
unsigned int *) i, j);
224 static inline UInt32 ExchangeAdd_NoSync(
volatile register UInt32 *i,
register UInt32 j)
226 return __builtin_cellAtomicAdd32((
unsigned int *) i, j);
229 static inline bool CompareAndSet_NoSync(
volatile register UInt32 *i,
register UInt32 c,
register UInt32 value)
// CAS succeeded iff the value returned (the previous *i) equals c.
231 return (c == __builtin_cellAtomicCompareAndSwap32((
unsigned int *) i, c, value));
// GCC PowerPC inline asm.  NOTE(review): the conditional-branch lines of
// each loop are not visible in this extract.
235 static inline UInt32 Exchange_NoSync(
volatile UInt32 *i, UInt32 j)
239 asm volatile(
"1:\n\t"
240 "lwarx %[r],0,%[i]\n\t"
241 "stwcx. %[j],0,%[i]\n\t"
243 :
"+m" (*i), [r]
"=&b" (ret) : [i]
"b" (i), [j]
"b" (j) :
"cc",
"memory");
248 static inline UInt32 ExchangeAdd_NoSync(
volatile UInt32 *i, UInt32 j)
252 asm volatile(
"1:\n\t"
253 "lwarx %[r],0,%[i]\n\t"
254 "add %[o],%[r],%[j]\n\t"
255 "stwcx. %[o],0,%[i]\n\t"
257 :
"+m" (*i), [r]
"=&b" (ret), [o]
"=&r" (dummy) : [i]
"b" (i), [j]
"b" (j) :
"cc",
"memory");
262 static inline bool CompareAndSet_NoSync(
volatile UInt32 *i, UInt32 c, UInt32 value)
266 asm volatile(
"1:\n\t"
267 "lwarx %[r],0,%[i]\n\t"
268 "cmpw 0,%[r],%[cmp]\n\t"
271 "stwcx. %[val],0,%[i]\n\t"
274 :
"+m" (*i), [r]
"=&b" (ret) : [i]
"b" (i), [cmp]
"b" (c), [val]
"b" (value) :
"cc",
"memory");
// Success test on CR0[EQ], as above.
276 return (ret & 0x20000000) ? 1 : 0;
// MIPS: ll/sc (load-linked / store-conditional) retry loops.  Two groups of
// the same three operations appear, separated by a preprocessor switch that
// is not visible in this extract; the first group's asm bodies are
// truncated here.
280 #elif defined(KY_CPU_MIPS)
284 static inline UInt32 Exchange_NoSync(
volatile UInt32 *i, UInt32 j)
288 asm volatile(
"1:\n\t"
298 static inline UInt32 ExchangeAdd_NoSync(
volatile UInt32 *i, UInt32 j)
302 asm volatile(
"1:\n\t"
312 static inline bool CompareAndSet_NoSync(
volatile UInt32 *i, UInt32 c, UInt32 value)
316 asm volatile(
"1:\n\t"
// Exchange: ll reads *i, sc conditionally stores j; the retry branch is
// truncated in this extract.
331 static inline UInt32 Exchange_NoSync(
volatile UInt32 *i, UInt32 j)
335 asm volatile(
"1:\n\t"
336 "ll %[r],0(%[i])\n\t"
337 "sc %[j],0(%[i])\n\t"
340 :
"+m" (*i), [r]
"=&d" (ret) : [i]
"d" (i), [j]
"d" (j) :
"cc",
"memory");
345 static inline UInt32 ExchangeAdd_NoSync(
volatile UInt32 *i, UInt32 j)
349 asm volatile(
"1:\n\t"
350 "ll %[r],0(%[i])\n\t"
351 "addu %[j],%[r],%[j]\n\t"
352 "sc %[j],0(%[i])\n\t"
355 :
"+m" (*i), [r]
"=&d" (ret) : [i]
"d" (i), [j]
"d" (j) :
"cc",
"memory");
// CAS: bails out to label 2 (not visible here) when *i differs from c.
360 static inline bool CompareAndSet_NoSync(
volatile UInt32 *i, UInt32 c, UInt32 value)
364 asm volatile(
"1:\n\t"
366 "ll %[o],0(%[i])\n\t"
367 "bne %[o],%[c],2f\n\t"
369 "sc %[r],0(%[i])\n\t"
373 :
"+m" (*i),[r]
"=&d" (ret), [o]
"=&d" (dummy) : [i]
"d" (i), [c]
"d" (c), [v]
"d" (value)
// ARM with the ARM/SNC compilers: exclusive-access intrinsics.  Only the
// __strex store halves are visible; the matching __ldrex loads and the loop
// heads are truncated in this extract.
380 #elif defined(KY_CPU_ARM) && (defined(KY_CC_ARM) || defined(KY_CC_SNC))
383 static inline UInt32 Exchange_NoSync(
volatile UInt32 *i, UInt32 j)
// __strex returns 0 on a successful store-exclusive.
388 if (__strex(j, i) == 0)
392 static inline UInt32 ExchangeAdd_NoSync(
volatile UInt32 *i, UInt32 j)
397 if (__strex(r + j, i) == 0)
402 static inline bool CompareAndSet_NoSync(
volatile UInt32 *i, UInt32 c, UInt32 value)
409 if (__strex(value, i) == 0)
// ARM with GCC-style inline asm: ldrex/strex retry loops.
414 #elif defined(KY_CPU_ARM)
417 static inline UInt32 Exchange_NoSync(
volatile UInt32 *i, UInt32 j)
421 asm volatile(
"1:\n\t"
422 "ldrex %[r],[%[i]]\n\t"
423 "strex %[t],%[j],[%[i]]\n\t"
426 :
"+m" (*i), [r]
"=&r" (ret), [t]
"=&r" (dummy) : [i]
"r" (i), [j]
"r" (j) :
"cc",
"memory");
431 static inline UInt32 ExchangeAdd_NoSync(
volatile UInt32 *i, UInt32 j)
433 UInt32 ret, dummy, test;
435 asm volatile(
"1:\n\t"
436 "ldrex %[r],[%[i]]\n\t"
437 "add %[o],%[r],%[j]\n\t"
438 "strex %[t],%[o],[%[i]]\n\t"
441 :
"+m" (*i), [r]
"=&r" (ret), [o]
"=&r" (dummy), [t]
"=&r" (test) : [i]
"r" (i), [j]
"r" (j) :
"cc",
"memory");
// CAS: ret is primed to 1 (failure) and cleared by a successful strex; the
// compare/branch lines are truncated in this extract.
446 static inline bool CompareAndSet_NoSync(
volatile UInt32 *i, UInt32 c, UInt32 value)
448 UInt32 ret = 1, dummy, test;
450 asm volatile(
"1:\n\t"
451 "ldrex %[o],[%[i]]\n\t"
454 "strex %[r],%[v],[%[i]]\n\t"
458 :
"+m" (*i),[r]
"=&r" (ret), [o]
"=&r" (dummy), [t]
"=&r" (test) : [i]
"r" (i), [c]
"r" (c), [v]
"r" (value)
// x86: locked read-modify-write instructions via inline asm.
464 #elif defined(KY_CPU_X86)
// xchg with a memory operand is implicitly locked, so no "lock" prefix is
// needed; j comes back holding the previous value of *i.
467 static inline UInt32 Exchange_NoSync(
volatile UInt32 *i, UInt32 j)
469 asm volatile(
"xchgl %1,%[i]\n"
470 :
"+m" (*i),
"=q" (j) : [i]
"m" (*i),
"1" (j) :
"cc",
"memory");
// lock xadd: j is replaced by the pre-add value of *i.
475 static inline UInt32 ExchangeAdd_NoSync(
volatile UInt32 *i, UInt32 j)
477 asm volatile(
"lock; xaddl %1,%[i]\n"
478 :
"+m" (*i),
"+q" (j) : [i]
"m" (*i) :
"cc",
"memory");
// lock cmpxchg: the comparand is tied to EAX ("1" (c) with "=a" (ret)); the
// final comparison/return is truncated in this extract.
483 static inline bool CompareAndSet_NoSync(
volatile UInt32 *i, UInt32 c, UInt32 value)
487 asm volatile(
"lock; cmpxchgl %[v],%[i]\n"
488 :
"+m" (*i),
"=a" (ret) : [i]
"m" (*i),
"1" (c), [v]
"q" (value) :
"cc",
"memory");
// Generic GCC (>= 4.1) / Clang: __sync builtins.
493 #elif (defined(KY_CC_GNU) && (__GNUC__ >= 4 && __GNUC_MINOR__ >= 1)) || defined(KY_CC_CLANG)
// Exchange is emulated with a CAS retry loop (there is no plain exchange
// builtin in the __sync family); the loop head reading *i into v is
// truncated in this extract.
497 static inline T Exchange_NoSync(
volatile T *i, T j)
502 }
while (!__sync_bool_compare_and_swap(i, v, j));
// Fetch-and-add: returns the pre-add value.
506 static inline T ExchangeAdd_NoSync(
volatile T *i, T j)
508 return __sync_fetch_and_add(i, j);
511 static inline bool CompareAndSet_NoSync(
volatile T *i, T c, T value)
513 return __sync_bool_compare_and_swap(i, c, value);
// AtomicOpsRaw_8ByteImpl: raw 64-bit primitives, only meaningful when
// 64-bit pointers and threads are both enabled.
522 struct AtomicOpsRaw_8ByteImpl :
public AtomicOpsRawBase
524 #if !defined(KY_64BIT_POINTERS) || !defined(KY_ENABLE_THREADS)
// Win64: Interlocked*64 API.
530 #elif defined(KY_OS_WIN32)
534 typedef volatile T* InterlockTPtr;
535 inline static T Exchange_NoSync(
volatile T* p, T val) {
return InterlockedExchange64((InterlockTPtr)p, val); }
536 inline static T ExchangeAdd_NoSync(
volatile T* p, T val) {
return InterlockedExchangeAdd64((InterlockTPtr)p, val); }
537 inline static bool CompareAndSet_NoSync(
volatile T* p, T c, T val) {
return InterlockedCompareExchange64((InterlockTPtr)p, val, c) == c; }
// PPC64: ldarx/stdcx. reservation loops (64-bit analogue of lwarx/stwcx.).
539 #elif defined(KY_CPU_PPC64)
// NOTE(review): the instruction that places the store value into %[o] is not
// visible in this extract; the [o]/dummy operand is otherwise unused here.
543 static inline UInt64 Exchange_NoSync(
volatile UInt64 *i, UInt64 j)
547 asm volatile(
"1:\n\t"
548 "ldarx %[r],0,%[i]\n\t"
550 "stdcx. %[o],0,%[i]\n\t"
552 :
"+m" (*i), [r]
"=&b" (ret), [o]
"=&r" (dummy) : [i]
"b" (i), [j]
"b" (j) :
"cc");
557 static inline UInt64 ExchangeAdd_NoSync(
volatile UInt64 *i, UInt64 j)
561 asm volatile(
"1:\n\t"
562 "ldarx %[r],0,%[i]\n\t"
563 "add %[o],%[r],%[j]\n\t"
564 "stdcx. %[o],0,%[i]\n\t"
566 :
"+m" (*i), [r]
"=&b" (ret), [o]
"=&r" (dummy) : [i]
"b" (i), [j]
"b" (j) :
"cc");
// NOTE(review): cmpw is a 32-bit compare although the operands are 64-bit —
// looks like it should be cmpd; verify against the upstream source.
571 static inline bool CompareAndSet_NoSync(
volatile UInt64 *i, UInt64 c, UInt64 value)
575 asm volatile(
"1:\n\t"
576 "ldarx %[r],0,%[i]\n\t"
577 "cmpw 0,%[r],%[cmp]\n\t"
580 "stdcx. %[val],0,%[i]\n\t"
583 :
"+m" (*i), [r]
"=&b" (ret), [o]
"=&r" (dummy) : [i]
"b" (i), [cmp]
"b" (c), [val]
"b" (value) :
"cc");
// CR0[EQ] of the saved condition register indicates stdcx. success.
585 return (ret & 0x20000000) ? 1 : 0;
// Generic GCC (>= 4.1) / Clang: __sync builtins; Exchange is a CAS retry
// loop whose head is truncated in this extract.
588 #elif (defined(KY_CC_GNU) && (__GNUC__ >= 4 && __GNUC_MINOR__ >= 1)) || defined(KY_CC_CLANG)
592 static inline T Exchange_NoSync(
volatile T *i, T j)
597 }
while (!__sync_bool_compare_and_swap(i, v, j));
601 static inline T ExchangeAdd_NoSync(
volatile T *i, T j)
603 return __sync_fetch_and_add(i, j);
606 static inline bool CompareAndSet_NoSync(
volatile T *i, T c, T value)
608 return __sync_bool_compare_and_swap(i, c, value);
// AtomicOpsRaw_DefImpl<O>: layers the fenced (_Sync/_Acquire/_Release)
// variants on top of O's unfenced primitives, using O's RAII barrier types.
622 struct AtomicOpsRaw_DefImpl :
public O
624 typedef typename O::T O_T;
625 typedef typename O::FullSync O_FullSync;
626 typedef typename O::AcquireSync O_AcquireSync;
627 typedef typename O::ReleaseSync O_ReleaseSync;
// Single-threaded build: plain, non-atomic stand-ins for the primitives.
631 #ifndef KY_ENABLE_THREADS
// Returns the previous value of *p after storing val.
634 inline static O_T Exchange_NoSync(
volatile O_T* p, O_T val) { O_T old = *p; *p = val;
return old; }
// Returns the previous value of *p after adding val.
636 inline static O_T ExchangeAdd_NoSync(
volatile O_T* p, O_T val) { O_T old = *p; *p += val;
return old; }
// Stores val only when *p == c; returns whether the store happened.
639 inline static bool CompareAndSet_NoSync(
volatile O_T* p, O_T c, O_T val) {
if (*p==c) { *p = val;
return 1; }
return 0; }
647 inline static O_T Exchange_Sync(
volatile O_T* p, O_T val) { O_FullSync sync; KY_UNUSED(sync);
return AtomicOpsRaw_DefImpl<O>::Exchange_NoSync(p, val); }
648 inline static O_T Exchange_Release(
volatile O_T* p, O_T val) { O_ReleaseSync sync; KY_UNUSED(sync);
return AtomicOpsRaw_DefImpl<O>::Exchange_NoSync(p, val); }
649 inline static O_T Exchange_Acquire(
volatile O_T* p, O_T val) { O_AcquireSync sync; KY_UNUSED(sync);
return AtomicOpsRaw_DefImpl<O>::Exchange_NoSync(p, val); }
650 inline static O_T ExchangeAdd_Sync(
volatile O_T* p, O_T val) { O_FullSync sync; KY_UNUSED(sync);
return AtomicOpsRaw_DefImpl<O>::ExchangeAdd_NoSync(p, val); }
651 inline static O_T ExchangeAdd_Release(
volatile O_T* p, O_T val) { O_ReleaseSync sync; KY_UNUSED(sync);
return AtomicOpsRaw_DefImpl<O>::ExchangeAdd_NoSync(p, val); }
652 inline static O_T ExchangeAdd_Acquire(
volatile O_T* p, O_T val) { O_AcquireSync sync; KY_UNUSED(sync);
return AtomicOpsRaw_DefImpl<O>::ExchangeAdd_NoSync(p, val); }
653 inline static bool CompareAndSet_Sync(
volatile O_T* p, O_T c, O_T val) { O_FullSync sync; KY_UNUSED(sync);
return AtomicOpsRaw_DefImpl<O>::CompareAndSet_NoSync(p,c,val); }
654 inline static bool CompareAndSet_Release(
volatile O_T* p, O_T c, O_T val) { O_ReleaseSync sync; KY_UNUSED(sync);
return AtomicOpsRaw_DefImpl<O>::CompareAndSet_NoSync(p,c,val); }
655 inline static bool CompareAndSet_Acquire(
volatile O_T* p, O_T c, O_T val) { O_AcquireSync sync; KY_UNUSED(sync);
return AtomicOpsRaw_DefImpl<O>::CompareAndSet_NoSync(p,c,val); }
// Store with release semantics.  Two variants exist behind a preprocessor
// switch that is not visible in this extract: one routed through
// Exchange_Release, the other a plain store guarded by the release barrier.
661 inline static void Store_Release(
volatile O_T* p, O_T val) { Exchange_Release(p, val); }
663 inline static void Store_Release(
volatile O_T* p, O_T val) { O_ReleaseSync sync; KY_UNUSED(sync); *p = val; }
// Load with acquire semantics: plain read bracketed by the acquire barrier.
665 inline static O_T Load_Acquire(
const volatile O_T* p) { O_AcquireSync sync; KY_UNUSED(sync);
return *p; }
// AtomicOpsRaw<size>: primary template (unsupported sizes do nothing) plus
// 4- and 8-byte specializations, each pinned by a compile-time size assert.
670 struct AtomicOpsRaw :
public AtomicOpsRawBase { };
673 struct AtomicOpsRaw<4> :
public AtomicOpsRaw_DefImpl<AtomicOpsRaw_4ByteImpl>
677 { KY_COMPILER_ASSERT(
sizeof(AtomicOpsRaw_DefImpl<AtomicOpsRaw_4ByteImpl>::T) == 4); }
680 struct AtomicOpsRaw<8> :
public AtomicOpsRaw_DefImpl<AtomicOpsRaw_8ByteImpl>
683 { KY_COMPILER_ASSERT(
sizeof(AtomicOpsRaw_DefImpl<AtomicOpsRaw_8ByteImpl>::T) == 8); }
// AtomicOps<C>: the public type-safe facade.  Selects the raw implementation
// by sizeof(C) and reinterprets C through a union with the raw integral T.
696 typedef AtomicOpsRaw<sizeof(C)> Ops;
697 typedef typename Ops::T T;
698 typedef volatile typename Ops::T* PT;
// Union used to pun C to/from the raw type T without pointer casts.
701 union C2T_union { C c; T t; };
705 inline static C Exchange_Sync(
volatile C* p, C val) { C2T_union u; u.c = val; u.t = Ops::Exchange_Sync((PT)p, u.t);
return u.c; }
706 inline static C Exchange_Release(
volatile C* p, C val) { C2T_union u; u.c = val; u.t = Ops::Exchange_Release((PT)p, u.t);
return u.c; }
707 inline static C Exchange_Acquire(
volatile C* p, C val) { C2T_union u; u.c = val; u.t = Ops::Exchange_Acquire((PT)p, u.t);
return u.c; }
708 inline static C Exchange_NoSync(
volatile C* p, C val) { C2T_union u; u.c = val; u.t = Ops::Exchange_NoSync((PT)p, u.t);
return u.c; }
709 inline static C ExchangeAdd_Sync(
volatile C* p, C val) { C2T_union u; u.c = val; u.t = Ops::ExchangeAdd_Sync((PT)p, u.t);
return u.c; }
710 inline static C ExchangeAdd_Release(
volatile C* p, C val) { C2T_union u; u.c = val; u.t = Ops::ExchangeAdd_Release((PT)p, u.t);
return u.c; }
711 inline static C ExchangeAdd_Acquire(
volatile C* p, C val) { C2T_union u; u.c = val; u.t = Ops::ExchangeAdd_Acquire((PT)p, u.t);
return u.c; }
712 inline static C ExchangeAdd_NoSync(
volatile C* p, C val) { C2T_union u; u.c = val; u.t = Ops::ExchangeAdd_NoSync((PT)p, u.t);
return u.c; }
713 inline static bool CompareAndSet_Sync(
volatile C* p, C c, C val) { C2T_union u,cu; u.c = val; cu.c = c;
return Ops::CompareAndSet_Sync((PT)p, cu.t, u.t); }
714 inline static bool CompareAndSet_Release(
volatile C* p, C c, C val){ C2T_union u,cu; u.c = val; cu.c = c;
return Ops::CompareAndSet_Release((PT)p, cu.t, u.t); }
715 inline static bool CompareAndSet_Acquire(
volatile C* p, C c, C val){ C2T_union u,cu; u.c = val; cu.c = c;
return Ops::CompareAndSet_Acquire((PT)p, cu.t, u.t); }
716 inline static bool CompareAndSet_NoSync(
volatile C* p, C c, C val) { C2T_union u,cu; u.c = val; cu.c = c;
return Ops::CompareAndSet_NoSync((PT)p, cu.t, u.t); }
718 inline static void Store_Release(
volatile C* p, C val) { C2T_union u; u.c = val; Ops::Store_Release((PT)p, u.t); }
719 inline static C Load_Acquire(
const volatile C* p) { C2T_union u; u.t = Ops::Load_Acquire((PT)p);
return u.c; }
// AtomicValueBase<T>: common base holding the atomic value ("Value",
// declared outside this extract) and exposing exchange / CAS / store / load
// in all four fencing flavors.
726 class AtomicValueBase
729 typedef AtomicOps<T> Ops;
734 inline AtomicValueBase() { }
// Initializing constructor; publishes the initial value with release
// semantics.
735 explicit inline AtomicValueBase(T val) { Ops::Store_Release(&Value, val); }
// Implicit read.  NOTE(review): this is a plain read of Value, not
// Load_Acquire — callers needing ordering must use Load_Acquire().
739 inline operator T()
const {
return Value; }
// Exchange: returns the previous value.
742 inline T Exchange_Sync(T val) {
return Ops::Exchange_Sync(&Value, val); }
743 inline T Exchange_Release(T val) {
return Ops::Exchange_Release(&Value, val); }
744 inline T Exchange_Acquire(T val) {
return Ops::Exchange_Acquire(&Value, val); }
745 inline T Exchange_NoSync(T val) {
return Ops::Exchange_NoSync(&Value, val); }
// Compare-and-set: stores val only when the value equals c.
746 inline bool CompareAndSet_Sync(T c, T val) {
return Ops::CompareAndSet_Sync(&Value, c, val); }
747 inline bool CompareAndSet_Release(T c, T val) {
return Ops::CompareAndSet_Release(&Value, c, val); }
748 inline bool CompareAndSet_Acquire(T c, T val) {
return Ops::CompareAndSet_Acquire(&Value, c, val); }
749 inline bool CompareAndSet_NoSync(T c, T val) {
return Ops::CompareAndSet_NoSync(&Value, c, val); }
751 inline void Store_Release(T val) { Ops::Store_Release(&Value, val); }
752 inline T Load_Acquire()
const {
return Ops::Load_Acquire(&Value); }
// AtomicPtr<T>: atomic pointer wrapper over AtomicValueBase<T*>.
762 class AtomicPtr :
public AtomicValueBase<T*>
764 typedef typename AtomicValueBase<T*>::Ops Ops;
// Default-constructs to null.  Plain assignment (not Store_Release): no
// other thread can observe the object during construction.
768 inline AtomicPtr() : AtomicValueBase<T*>() { this->Value = 0; }
769 explicit inline AtomicPtr(T* val) : AtomicValueBase<T*>(val) { }
// Dereference / conversion read the pointer with acquire semantics.
772 inline T* operator -> ()
const {
return this->Load_Acquire(); }
777 inline operator T* ()
const {
return this->Load_Acquire(); }
// Atomic pointer add: ((T*)0) + incr turns the element count into a
// byte-scaled T* offset for the raw add; returns the PREVIOUS pointer.
// NOTE(review): arithmetic on a null pointer is technically UB — relies on
// flat-memory behavior of the supported compilers.
784 inline T* ExchangeAdd_Sync(I incr) {
return Ops::ExchangeAdd_Sync(&this->Value, ((T*)0) + incr); }
786 inline T* ExchangeAdd_Release(I incr) {
return Ops::ExchangeAdd_Release(&this->Value, ((T*)0) + incr); }
788 inline T* ExchangeAdd_Acquire(I incr) {
return Ops::ExchangeAdd_Acquire(&this->Value, ((T*)0) + incr); }
790 inline T* ExchangeAdd_NoSync(I incr) {
return Ops::ExchangeAdd_NoSync(&this->Value, ((T*)0) + incr); }
// Assignment publishes with release semantics and returns the new value.
794 inline T* operator = (T* val) { this->Store_Release(val);
return val; }
// Compound ops return the updated pointer (old value plus adjustment).
797 inline T* operator += (I val) {
return ExchangeAdd_Sync(val) + val; }
799 inline T* operator -= (I val) {
return operator += (-val); }
// Pre-increment/decrement return the new value; post-forms return the old.
801 inline T* operator ++ () {
return ExchangeAdd_Sync(1) + 1; }
802 inline T* operator -- () {
return ExchangeAdd_Sync(-1) - 1; }
803 inline T* operator ++ (
int) {
return ExchangeAdd_Sync(1); }
804 inline T* operator -- (
int) {
return ExchangeAdd_Sync(-1); }
// AtomicInt<T>: atomic integer wrapper adding arithmetic and (via CAS
// loops) bitwise/multiplicative operators.
815 class AtomicInt :
public AtomicValueBase<T>
817 typedef typename AtomicValueBase<T>::Ops Ops;
820 inline AtomicInt() : AtomicValueBase<T>() { }
821 explicit inline AtomicInt(T val) : AtomicValueBase<T>(val) { }
// Fetch-and-add in the four fencing flavors; returns the pre-add value.
825 inline T ExchangeAdd_Sync(T val) {
return Ops::ExchangeAdd_Sync(&this->Value, val); }
826 inline T ExchangeAdd_Release(T val) {
return Ops::ExchangeAdd_Release(&this->Value, val); }
827 inline T ExchangeAdd_Acquire(T val) {
return Ops::ExchangeAdd_Acquire(&this->Value, val); }
828 inline T ExchangeAdd_NoSync(T val) {
return Ops::ExchangeAdd_NoSync(&this->Value, val); }
// Increment helpers (result discarded).
830 inline void Increment_Sync() { ExchangeAdd_Sync((T)1); }
831 inline void Increment_Release() { ExchangeAdd_Release((T)1); }
832 inline void Increment_Acquire() { ExchangeAdd_Acquire((T)1); }
833 inline void Increment_NoSync() { ExchangeAdd_NoSync((T)1); }
// Assignment publishes with release semantics.
837 inline T operator = (T val) { this->Store_Release(val);
return val; }
// Compound arithmetic returns the updated value.
838 inline T operator += (T val) {
return ExchangeAdd_Sync(val) + val; }
839 inline T operator -= (T val) {
return ExchangeAdd_Sync(0 - val) - val; }
// Pre-forms return the new value, post-forms the old one.
841 inline T operator ++ () {
return ExchangeAdd_Sync((T)1) + 1; }
842 inline T operator -- () {
return ExchangeAdd_Sync(((T)0)-1) - 1; }
843 inline T operator ++ (
int) {
return ExchangeAdd_Sync((T)1); }
844 inline T operator -- (
int) {
return ExchangeAdd_Sync(((T)0)-1); }
// CAS-retry-loop operators; the loop bodies (reading comp, computing
// newVal) are truncated in this extract.
847 T operator &= (T arg)
853 }
while(!this->CompareAndSet_Sync(comp, newVal));
857 T operator |= (T arg)
863 }
while(!this->CompareAndSet_Sync(comp, newVal));
867 T operator ^= (T arg)
873 }
while(!this->CompareAndSet_Sync(comp, newVal));
877 T operator *= (T arg)
883 }
while(!this->CompareAndSet_Sync(comp, newVal));
887 T operator /= (T arg)
893 }
while(!CompareAndSet_Sync(comp, newVal));
897 T operator >>= (
unsigned bits)
902 newVal = comp >> bits;
903 }
while(!CompareAndSet_Sync(comp, newVal));
// Atomic left shift via a CAS retry loop (loop head truncated in this
// extract); correctly qualifies the dependent-base call with this->.
907 T operator <<= (
unsigned bits)
912 newVal = comp << bits;
913 }
while(!this->CompareAndSet_Sync(comp, newVal));
// No-op operator delete.  NOTE(review): presumably paired elsewhere with a
// disabled operator new to keep these objects off the heap — confirm.
932 void operator delete(
void*) {}
// Lock: recursive mutex with a per-platform implementation.
// Threads disabled: all operations compile away.
937 #if !defined(KY_ENABLE_THREADS)
942 inline Lock(
unsigned) { }
944 inline void DoLock() { }
945 inline void Unlock() { }
// Win32: either a custom fast lock (KY_FAST_LOCK) built on atomics plus a
// lazily allocated kernel semaphore, or a plain CRITICAL_SECTION.
948 #elif defined(KY_OS_WIN32)
955 #if defined(KY_FAST_LOCK)
// Id of the thread currently holding the lock (0 when free).
956 AtomicInt<DWORD> LockedThreadId;
957 AtomicInt<int> WaiterCount;
958 volatile HANDLE hSemaphore;
961 unsigned RecursiveLockCount;
// Fast, uncontended acquire: CAS LockedThreadId from 0 to threadId.
963 inline bool PerfLockImmediate(DWORD threadId) {
return LockedThreadId.CompareAndSet_Acquire(0, threadId); }
964 inline void WaiterPlus() { WaiterCount.ExchangeAdd_NoSync(1); }
965 inline void WaiterMinus() { WaiterCount.ExchangeAdd_NoSync(-1); }
// Slow path taken on contention (defined out of line).
967 void PerfLock(DWORD threadId);
969 void AllocateKernelSemaphore();
970 void SetSpinMax(
unsigned maxCount);
976 Lock(
unsigned spinCount = 0);
979 #if !defined(KY_FAST_LOCK)
980 inline void DoLock() { ::EnterCriticalSection(&cs); }
981 inline void Unlock() { ::LeaveCriticalSection(&cs); }
// WinCE / Xbox: CRITICAL_SECTION only.
987 #elif defined(KY_OS_WINCE) || defined(KY_OS_XBOX) || defined(KY_OS_XBOX360)
991 KY_EXPORT Lock(
unsigned spinCount = 0);
994 inline void DoLock() { ::EnterCriticalSection(&cs); }
995 inline void Unlock() { ::LeaveCriticalSection(&cs); }
// PS3: lightweight mutex, manually 8-byte aligned inside an oversized byte
// buffer (the attribute only guarantees 4-byte alignment).
997 #elif defined(KY_OS_PS3)
999 UByte mutex[
sizeof(sys_lwmutex_t) + 4] __attribute__((aligned(4)));
1000 sys_lwmutex_t* pmutex;
1001 static sys_lwmutex_attribute_t LockAttr;
1007 Lock (
unsigned dummy = 0)
1009 pmutex = (sys_lwmutex_t *) (UPInt(&mutex) & 4 ? mutex+4 : mutex);
1010 sys_lwmutex_create(pmutex,&LockAttr);
1013 ~Lock () { sys_lwmutex_destroy(pmutex); }
1014 inline void DoLock() { sys_lwmutex_lock(pmutex,SYS_NO_TIMEOUT); }
1015 inline void Unlock() { sys_lwmutex_unlock(pmutex); }
// Wii / Wii U: OS mutex.
1017 #elif defined(KY_OS_WII) || defined(KY_OS_WIIU)
1020 Lock (
unsigned dummy = 0) { OSInitMutex(&mutex); KY_UNUSED(dummy); }
1021 inline void DoLock() { OSLockMutex(&mutex); }
1022 inline void Unlock() { OSUnlockMutex(&mutex); }
// PS Vita: recursive lightweight kernel mutex, same alignment trick as PS3.
1024 #elif defined(KY_OS_PSVITA)
1025 UByte mutex[
sizeof(SceKernelLwMutexWork) + 4] __attribute__((aligned(4)));
1026 SceKernelLwMutexWork* pmutex;
1029 Lock (
unsigned dummy = 0)
1031 pmutex = (SceKernelLwMutexWork *) (UPInt(&mutex) & 4 ? mutex+4 : mutex);
1032 int result = sceKernelCreateLwMutex(pmutex,
"SF::Lock", SCE_KERNEL_LW_MUTEX_ATTR_RECURSIVE | SCE_KERNEL_ATTR_TH_FIFO, 0, NULL);
1033 KY_ASSERT(result == SCE_OK);
// Poison the storage on destruction to help catch use-after-destroy.
1036 ~Lock() { sceKernelDeleteLwMutex(pmutex); memset(mutex, 0xfe,
sizeof(mutex)); }
1037 inline void DoLock() {
int result = sceKernelLockLwMutex(pmutex, 1, NULL); KY_ASSERT(result == SCE_OK); }
1038 inline void Unlock() {
int result = sceKernelUnlockLwMutex(pmutex, 1); KY_ASSERT(result == SCE_OK); }
// 3DS: nn critical section.
1040 #elif defined(KY_OS_3DS)
1041 nn::os::CriticalSection cs;
1044 Lock (
unsigned dummy = 0) { KY_UNUSED(dummy); cs.Initialize(); }
1045 inline void DoLock() { cs.Enter(); }
1046 inline void Unlock() { cs.Leave(); }
// Default (POSIX): recursive pthread mutex with a lazily created shared
// attribute.
1049 pthread_mutex_t mutex;
1052 static pthread_mutexattr_t RecursiveAttr;
1053 static bool RecursiveAttrInit;
1055 Lock (
unsigned dummy = 0)
// NOTE(review): this lazy init of the shared attribute is itself
// unsynchronized — racy if the first Locks are created concurrently.
1057 if (!RecursiveAttrInit)
1059 pthread_mutexattr_init(&RecursiveAttr);
1060 pthread_mutexattr_settype(&RecursiveAttr, PTHREAD_MUTEX_RECURSIVE);
1061 RecursiveAttrInit = 1;
1064 pthread_mutex_init(&mutex,&RecursiveAttr);
1066 ~Lock () { pthread_mutex_destroy(&mutex); }
1067 inline void DoLock() { pthread_mutex_lock(&mutex); }
1068 inline void Unlock() { pthread_mutex_unlock(&mutex); }
1070 #endif // KY_ENABLE_THREDS
// Locker: RAII scope guard — locks in the constructor, unlocks in the
// destructor (whose signature line is not visible in this extract).
1079 inline Locker(Lock *plock)
1080 { pLock = plock; pLock->DoLock(); }
1082 { pLock->Unlock(); }
// LockSafe: Lock wrapper that, in debug builds, tracks the nesting count so
// assertions / IsLocked() can catch unbalanced usage.
1094 LockSafe(
unsigned spinCount = 0) : mLock(spinCount)
1095 #ifdef KY_BUILD_DEBUG
1102 #ifdef KY_BUILD_DEBUG
// Unlock-side check: the count must have been positive before decrement.
1111 #ifdef KY_BUILD_DEBUG
1112 KY_ASSERT(LockCount.ExchangeAdd_NoSync(-1) > 0);
1116 #ifdef KY_BUILD_DEBUG
1117 bool IsLocked()
const
1119 return LockCount != 0;
1127 Locker(LockSafe *lock)
1142 TmpUnlocker(LockSafe *lock)
// Debug-only nesting counter.
1156 #ifdef KY_BUILD_DEBUG
1157 AtomicInt<int> LockCount;
Definition: gamekitcrowddispersion.h:20