#ifndef QATOMIC_IA64_H
#define QATOMIC_IA64_H

#define Q_ATOMIC_INT_REFERENCE_COUNTING_IS_ALWAYS_NATIVE
#define Q_ATOMIC_INT_REFERENCE_COUNTING_IS_WAIT_FREE

#define Q_ATOMIC_INT_TEST_AND_SET_IS_ALWAYS_NATIVE
#define Q_ATOMIC_INT_TEST_AND_SET_IS_WAIT_FREE

#define Q_ATOMIC_INT_FETCH_AND_STORE_IS_ALWAYS_NATIVE
#define Q_ATOMIC_INT_FETCH_AND_STORE_IS_WAIT_FREE

#define Q_ATOMIC_INT_FETCH_AND_ADD_IS_ALWAYS_NATIVE

#define Q_ATOMIC_POINTER_TEST_AND_SET_IS_ALWAYS_NATIVE
#define Q_ATOMIC_POINTER_TEST_AND_SET_IS_WAIT_FREE

#define Q_ATOMIC_POINTER_FETCH_AND_STORE_IS_ALWAYS_NATIVE
#define Q_ATOMIC_POINTER_FETCH_AND_STORE_IS_WAIT_FREE

#define Q_ATOMIC_POINTER_FETCH_AND_ADD_IS_ALWAYS_NATIVE
template <typename T>
Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::isFetchAndAddNative()
{ return true; }
template <typename T>
Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::isFetchAndAddWaitFree()
{ return false; }

inline bool _q_ia64_fetchadd_immediate(register int value)
{
    return value == 1 || value == -1
        || value == 4 || value == -4
        || value == 8 || value == -8
        || value == 16 || value == -16;
}
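// Note: the IA-64 fetchadd4/fetchadd8 instructions encode their increment as
// an immediate and only accept the values +/-1, +/-4, +/-8 and +/-16.  The
// helper above is meant to be evaluated at compile time so that those
// increments can use a single fetchadd instruction, while any other increment
// falls back to an exchange-add intrinsic or a compare-and-exchange retry
// loop in the compiler-specific sections below.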
#if defined(Q_CC_INTEL)

// intrinsics provided by the Intel C++ compiler
#include <ia64intrin.h>

inline int QBasicAtomicInt::fetchAndStoreAcquire(int newValue)
{
    return static_cast<int>(_InterlockedExchange(&_q_value, newValue));
}

inline int QBasicAtomicInt::fetchAndStoreRelease(int newValue)
{
    __memory_barrier();
    return static_cast<int>(_InterlockedExchange(&_q_value, newValue));
}
inline bool QBasicAtomicInt::testAndSetRelaxed(int expectedValue, int newValue)
{
    register int expectedValueCopy = expectedValue;
    return (static_cast<int>(_InterlockedCompareExchange(&_q_value,
                                                         newValue,
                                                         expectedValueCopy))
            == expectedValue);
}

inline bool QBasicAtomicInt::testAndSetAcquire(int expectedValue, int newValue)
{
    register int expectedValueCopy = expectedValue;
    return (static_cast<int>(_InterlockedCompareExchange_acq(reinterpret_cast<volatile uint *>(&_q_value),
                                                             newValue,
                                                             expectedValueCopy))
            == expectedValue);
}

inline bool QBasicAtomicInt::testAndSetRelease(int expectedValue, int newValue)
{
    register int expectedValueCopy = expectedValue;
    return (static_cast<int>(_InterlockedCompareExchange_rel(reinterpret_cast<volatile uint *>(&_q_value),
                                                             newValue,
                                                             expectedValueCopy))
            == expectedValue);
}
inline int QBasicAtomicInt::fetchAndAddAcquire(int valueToAdd)
{
    if (__builtin_constant_p(valueToAdd)) {
        if (valueToAdd == 1)
            return __fetchadd4_acq((unsigned int *)&_q_value, 1);
        if (valueToAdd == -1)
            return __fetchadd4_acq((unsigned int *)&_q_value, -1);
    }
    return _InterlockedExchangeAdd(&_q_value, valueToAdd);
}

inline int QBasicAtomicInt::fetchAndAddRelease(int valueToAdd)
{
    if (__builtin_constant_p(valueToAdd)) {
        if (valueToAdd == 1)
            return __fetchadd4_rel((unsigned int *)&_q_value, 1);
        if (valueToAdd == -1)
            return __fetchadd4_rel((unsigned int *)&_q_value, -1);
    }
    __memory_barrier();
    return _InterlockedExchangeAdd(&_q_value, valueToAdd);
}
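// The __builtin_constant_p() check costs nothing at run time: it folds to a
// compile-time constant, so when the increment is a literal 1 or -1 the
// compiler can emit a single fetchadd4 instruction.  Any other increment goes
// through the generic _InterlockedExchangeAdd intrinsic, which on IA-64 is
// typically a compare-and-exchange based path.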
inline bool QBasicAtomicInt::ref()
{
    return _InterlockedIncrement(&_q_value) != 0;
}

inline bool QBasicAtomicInt::deref()
{
    return _InterlockedDecrement(&_q_value) != 0;
}
template <typename T>
Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndStoreAcquire(T *newValue)
{
    return (T *)_InterlockedExchangePointer(reinterpret_cast<void * volatile*>(&_q_value), newValue);
}

template <typename T>
Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndStoreRelease(T *newValue)
{
    __memory_barrier();
    return fetchAndStoreAcquire(newValue);
}

template <typename T>
Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetRelaxed(T *expectedValue, T *newValue)
{
    register T *expectedValueCopy = expectedValue;
    return (_InterlockedCompareExchangePointer(reinterpret_cast<void * volatile*>(&_q_value),
                                               newValue, expectedValueCopy)
            == expectedValueCopy);
}
template <typename T>
Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetAcquire(T *expectedValue, T *newValue)
{
    union {
        volatile void *x;
        volatile unsigned long *p;
    };
    x = &_q_value;
    register T *expectedValueCopy = expectedValue;
    return (_InterlockedCompareExchange64_acq(p, quintptr(newValue), quintptr(expectedValueCopy))
            == quintptr(expectedValue));
}

template <typename T>
Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetRelease(T *expectedValue, T *newValue)
{
    union {
        volatile void *x;
        volatile unsigned long *p;
    };
    x = &_q_value;
    register T *expectedValueCopy = expectedValue;
    return (_InterlockedCompareExchange64_rel(p, quintptr(newValue), quintptr(expectedValueCopy))
            == quintptr(expectedValue));
}

template <typename T>
Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetOrdered(T *expectedValue, T *newValue)
{
    __memory_barrier();
    return testAndSetAcquire(expectedValue, newValue);
}
template <typename T>
Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddAcquire(qptrdiff valueToAdd)
{
    return (T *)_InterlockedExchangeAdd64((volatile long *)&_q_value,
                                          valueToAdd * sizeof(T));
}

template <typename T>
Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddRelease(qptrdiff valueToAdd)
{
    __memory_barrier();
    return (T *)_InterlockedExchangeAdd64((volatile long *)&_q_value,
                                          valueToAdd * sizeof(T));
}

template <typename T>
Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddOrdered(qptrdiff valueToAdd)
{
    __memory_barrier();
    return fetchAndAddAcquire(valueToAdd);
}
#elif defined(Q_CC_GNU)
inline int QBasicAtomicInt::fetchAndStoreAcquire(int newValue)
{
    int ret;
    asm volatile("xchg4 %0=%1,%2\n"
                 : "=r" (ret), "+m" (_q_value)
                 : "r" (newValue)
                 : "memory");
    return ret;
}
inline bool QBasicAtomicInt::testAndSetAcquire(int expectedValue, int newValue)
{
    int ret;
    asm volatile("mov ar.ccv=%2\n"
                 ";;\n"
                 "cmpxchg4.acq %0=%1,%3,ar.ccv\n"
                 : "=r" (ret), "+m" (_q_value)
                 : "r" (expectedValue), "r" (newValue)
                 : "memory");
    return ret == expectedValue;
}
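// This is IA-64's native compare-and-swap: the expected value is first loaded
// into the ar.ccv application register, then cmpxchg4.acq writes the new value
// only if the word in memory still equals ar.ccv, and returns the old value
// either way.  Comparing that old value against expectedValue tells the caller
// whether the swap happened.  A rough C-level sketch of the same operation,
// using a GCC builtin (illustration only, not part of this header):
//
//     bool testAndSetAcquireSketch(volatile int *p, int expected, int newVal)
//     {
//         // returns the previous value of *p; the store happens only on match
//         int old = __sync_val_compare_and_swap(p, expected, newVal);
//         return old == expected;
//     }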
inline bool QBasicAtomicInt::testAndSetRelease(int expectedValue, int newValue)
{
    int ret;
    asm volatile("mov ar.ccv=%2\n"
                 ";;\n"
                 "cmpxchg4.rel %0=%1,%3,ar.ccv\n"
                 : "=r" (ret), "+m" (_q_value)
                 : "r" (expectedValue), "r" (newValue)
                 : "memory");
    return ret == expectedValue;
}
inline int QBasicAtomicInt::fetchAndAddAcquire(int valueToAdd)
{
    int ret;

    // single-instruction fast path for the increments fetchadd4 can encode
    if (__builtin_constant_p(valueToAdd) && _q_ia64_fetchadd_immediate(valueToAdd)) {
        asm volatile("fetchadd4.acq %0=%1,%2\n"
                     : "=r" (ret), "+m" (_q_value)
                     : "i" (valueToAdd)
                     : "memory");
        return ret;
    }

    // otherwise loop around a compare-and-exchange
    ret = _q_value;
    asm volatile("0:  mov          r9=%0\n"
                 "    mov          ar.ccv=%0\n"
                 "    add          %0=%0,%2\n"
                 "    ;;\n"
                 "    cmpxchg4.acq %0=%1,%0,ar.ccv\n"
                 "    ;;\n"
                 "    cmp.ne       p6,p0 = %0, r9\n"
                 "(p6) br.dptk 0b\n"
                 : "+r" (ret), "+m" (_q_value)
                 : "r" (valueToAdd)
                 : "r9", "p6", "memory");
    return ret;
}
inline int QBasicAtomicInt::fetchAndAddRelease(int valueToAdd)
{
    int ret;

    // single-instruction fast path for the increments fetchadd4 can encode
    if (__builtin_constant_p(valueToAdd) && _q_ia64_fetchadd_immediate(valueToAdd)) {
        asm volatile("fetchadd4.rel %0=%1,%2\n"
                     : "=r" (ret), "+m" (_q_value)
                     : "i" (valueToAdd)
                     : "memory");
        return ret;
    }

    // otherwise loop around a compare-and-exchange
    ret = _q_value;
    asm volatile("0:  mov          r9=%0\n"
                 "    mov          ar.ccv=%0\n"
                 "    add          %0=%0,%2\n"
                 "    ;;\n"
                 "    cmpxchg4.rel %0=%1,%0,ar.ccv\n"
                 "    ;;\n"
                 "    cmp.ne       p6,p0 = %0, r9\n"
                 "(p6) br.dptk 0b\n"
                 : "+r" (ret), "+m" (_q_value)
                 : "r" (valueToAdd)
                 : "r9", "p6", "memory");
    return ret;
}
inline int QBasicAtomicInt::fetchAndAddOrdered(int valueToAdd)
{
    asm volatile("mf" ::: "memory");
    return fetchAndAddRelease(valueToAdd);
}
inline bool QBasicAtomicInt::ref()
{
    int ret;
    asm volatile("fetchadd4.acq %0=%1,1\n"
                 : "=r" (ret), "+m" (_q_value) : : "memory");
    return ret != -1;
}

inline bool QBasicAtomicInt::deref()
{
    int ret;
    asm volatile("fetchadd4.rel %0=%1,-1\n"
                 : "=r" (ret), "+m" (_q_value) : : "memory");
    return ret != 1;
}
template <typename T>
Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndStoreAcquire(T *newValue)
{
    T *ret;
    asm volatile("xchg8 %0=%1,%2\n"
                 : "=r" (ret), "+m" (_q_value)
                 : "r" (newValue)
                 : "memory");
    return ret;
}
template <typename T>
Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndStoreRelease(T *newValue)
{
    T *ret;
    // xchg only has acquire semantics, so a full memory fence precedes it
    asm volatile("mf\n"
                 "xchg8 %0=%1,%2\n"
                 : "=r" (ret), "+m" (_q_value)
                 : "r" (newValue)
                 : "memory");
    return ret;
}
template <typename T>
Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetAcquire(T *expectedValue, T *newValue)
{
    T *ret;
    asm volatile("mov ar.ccv=%2\n"
                 ";;\n"
                 "cmpxchg8.acq %0=%1,%3,ar.ccv\n"
                 : "=r" (ret), "+m" (_q_value)
                 : "r" (expectedValue), "r" (newValue)
                 : "memory");
    return ret == expectedValue;
}
template <typename T>
Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetRelease(T *expectedValue, T *newValue)
{
    T *ret;
    asm volatile("mov ar.ccv=%2\n"
                 ";;\n"
                 "cmpxchg8.rel %0=%1,%3,ar.ccv\n"
                 : "=r" (ret), "+m" (_q_value)
                 : "r" (expectedValue), "r" (newValue)
                 : "memory");
    return ret == expectedValue;
}
template <typename T>
Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddAcquire(qptrdiff valueToAdd)
{
    T *ret;

    // single-instruction fast path for the byte offsets fetchadd8 can encode
    if (__builtin_constant_p(valueToAdd)
        && _q_ia64_fetchadd_immediate(valueToAdd * sizeof(T))) {
        asm volatile("fetchadd8.acq %0=%1,%2\n"
                     : "=r" (ret), "+m" (_q_value)
                     : "i" (valueToAdd * sizeof(T))
                     : "memory");
        return ret;
    }

    // otherwise loop around a compare-and-exchange
    ret = _q_value;
    asm volatile("0:  mov          r9=%0\n"
                 "    mov          ar.ccv=%0\n"
                 "    add          %0=%0,%2\n"
                 "    ;;\n"
                 "    cmpxchg8.acq %0=%1,%0,ar.ccv\n"
                 "    ;;\n"
                 "    cmp.ne       p6,p0 = %0, r9\n"
                 "(p6) br.dptk 0b\n"
                 : "+r" (ret), "+m" (_q_value)
                 : "r" (valueToAdd * sizeof(T))
                 : "r9", "p6", "memory");
    return ret;
}
template <typename T>
Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddRelease(qptrdiff valueToAdd)
{
    T *ret;

    // single-instruction fast path for the byte offsets fetchadd8 can encode
    if (__builtin_constant_p(valueToAdd)
        && _q_ia64_fetchadd_immediate(valueToAdd * sizeof(T))) {
        asm volatile("fetchadd8.rel %0=%1,%2\n"
                     : "=r" (ret), "+m" (_q_value)
                     : "i" (valueToAdd * sizeof(T))
                     : "memory");
        return ret;
    }

    // otherwise loop around a compare-and-exchange
    ret = _q_value;
    asm volatile("0:  mov          r9=%0\n"
                 "    mov          ar.ccv=%0\n"
                 "    add          %0=%0,%2\n"
                 "    ;;\n"
                 "    cmpxchg8.rel %0=%1,%0,ar.ccv\n"
                 "    ;;\n"
                 "    cmp.ne       p6,p0 = %0, r9\n"
                 "(p6) br.dptk 0b\n"
                 : "+r" (ret), "+m" (_q_value)
                 : "r" (valueToAdd * sizeof(T))
                 : "r9", "p6", "memory");
    return ret;
}
template <typename T>
Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddOrdered(qptrdiff valueToAdd)
{
    asm volatile("mf" ::: "memory");
    return fetchAndAddRelease(valueToAdd);
}
#elif defined Q_CC_HPACC

#include <ia64/sys/inline.h>

#define FENCE (_Asm_fence)(_UP_CALL_FENCE | _UP_SYS_FENCE | _DOWN_CALL_FENCE | _DOWN_SYS_FENCE)
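// FENCE is passed to every HP aC++ atomic intrinsic below; the four flags
// presumably tell the compiler not to move memory accesses (call or system)
// across the intrinsic in either the upward or the downward direction, so
// each intrinsic also acts as a full compiler-level barrier on top of the
// hardware ordering it requests.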
inline int QBasicAtomicInt::fetchAndStoreAcquire(int newValue)
{
    return _Asm_xchg((_Asm_sz)_SZ_W, &_q_value, (unsigned)newValue,
                     (_Asm_ldhint)_LDHINT_NONE, FENCE);
}

inline int QBasicAtomicInt::fetchAndStoreRelease(int newValue)
{
    return _Asm_xchg((_Asm_sz)_SZ_W, &_q_value, (unsigned)newValue,
                     (_Asm_ldhint)_LDHINT_NONE, FENCE);
}
inline bool QBasicAtomicInt::testAndSetAcquire(int expectedValue, int newValue)
{
    _Asm_mov_to_ar((_Asm_app_reg)_AREG_CCV, (unsigned)expectedValue, FENCE);
    int ret = _Asm_cmpxchg((_Asm_sz)_SZ_W, (_Asm_sem)_SEM_ACQ,
                           &_q_value, (unsigned)newValue, (_Asm_ldhint)_LDHINT_NONE);
    return ret == expectedValue;
}

inline bool QBasicAtomicInt::testAndSetRelease(int expectedValue, int newValue)
{
    _Asm_mov_to_ar((_Asm_app_reg)_AREG_CCV, (unsigned)expectedValue, FENCE);
    int ret = _Asm_cmpxchg((_Asm_sz)_SZ_W, (_Asm_sem)_SEM_REL,
                           &_q_value, newValue, (_Asm_ldhint)_LDHINT_NONE);
    return ret == expectedValue;
}
inline int QBasicAtomicInt::fetchAndAddAcquire(int valueToAdd)
{
    if (valueToAdd == 1)
        return _Asm_fetchadd((_Asm_fasz)_FASZ_W, (_Asm_sem)_SEM_ACQ,
                             &_q_value, 1, (_Asm_ldhint)_LDHINT_NONE, FENCE);
    else if (valueToAdd == -1)
        return _Asm_fetchadd((_Asm_fasz)_FASZ_W, (_Asm_sem)_SEM_ACQ,
                             &_q_value, -1, (_Asm_ldhint)_LDHINT_NONE, FENCE);

    // otherwise loop around a compare-and-exchange
    register int old, ret;
    do {
        old = _q_value;
        _Asm_mov_to_ar((_Asm_app_reg)_AREG_CCV, (unsigned)old, FENCE);
        ret = _Asm_cmpxchg((_Asm_sz)_SZ_W, (_Asm_sem)_SEM_ACQ,
                           &_q_value, old + valueToAdd, (_Asm_ldhint)_LDHINT_NONE);
    } while (ret != old);
    return old;
}
inline int QBasicAtomicInt::fetchAndAddRelease(int valueToAdd)
{
    if (valueToAdd == 1)
        return _Asm_fetchadd((_Asm_fasz)_FASZ_W, (_Asm_sem)_SEM_REL,
                             &_q_value, 1, (_Asm_ldhint)_LDHINT_NONE, FENCE);
    else if (valueToAdd == -1)
        return _Asm_fetchadd((_Asm_fasz)_FASZ_W, (_Asm_sem)_SEM_REL,
                             &_q_value, -1, (_Asm_ldhint)_LDHINT_NONE, FENCE);

    // otherwise loop around a compare-and-exchange
    register int old, ret;
    do {
        old = _q_value;
        _Asm_mov_to_ar((_Asm_app_reg)_AREG_CCV, (unsigned)old, FENCE);
        ret = _Asm_cmpxchg((_Asm_sz)_SZ_W, (_Asm_sem)_SEM_REL,
                           &_q_value, old + valueToAdd, (_Asm_ldhint)_LDHINT_NONE);
    } while (ret != old);
    return old;
}
inline bool QBasicAtomicInt::ref()
{
    return (int)_Asm_fetchadd((_Asm_fasz)_FASZ_W, (_Asm_sem)_SEM_ACQ,
                              &_q_value, 1, (_Asm_ldhint)_LDHINT_NONE, FENCE) != -1;
}

inline bool QBasicAtomicInt::deref()
{
    return (int)_Asm_fetchadd((_Asm_fasz)_FASZ_W, (_Asm_sem)_SEM_REL,
                              &_q_value, -1, (_Asm_ldhint)_LDHINT_NONE, FENCE) != 1;
}
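// fetchadd returns the value the counter had *before* the increment, so
// ref() reports "new value is non-zero" by checking that the old value was
// not -1, and deref() by checking that the old value was not 1.  For example,
// a counter at 1: deref() fetch-adds -1, sees the old value 1, and therefore
// returns false, telling the caller the last reference is gone.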
template <typename T>
Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndStoreAcquire(T *newValue)
{
#ifdef __LP64__
    return (T *)_Asm_xchg((_Asm_sz)_SZ_D, &_q_value, (quint64)newValue,
                          (_Asm_ldhint)_LDHINT_NONE, FENCE);
#else
    return (T *)_Asm_xchg((_Asm_sz)_SZ_W, &_q_value, (quint32)newValue,
                          (_Asm_ldhint)_LDHINT_NONE, FENCE);
#endif
}

template <typename T>
Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndStoreRelease(T *newValue)
{
    return fetchAndStoreAcquire(newValue);
}
template <typename T>
Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetAcquire(T *expectedValue, T *newValue)
{
#ifdef __LP64__
    _Asm_mov_to_ar((_Asm_app_reg)_AREG_CCV, (quint64)expectedValue, FENCE);
    T *ret = (T *)_Asm_cmpxchg((_Asm_sz)_SZ_D, (_Asm_sem)_SEM_ACQ,
                               &_q_value, (quint64)newValue, (_Asm_ldhint)_LDHINT_NONE);
#else
    _Asm_mov_to_ar((_Asm_app_reg)_AREG_CCV, (quint32)expectedValue, FENCE);
    T *ret = (T *)_Asm_cmpxchg((_Asm_sz)_SZ_W, (_Asm_sem)_SEM_ACQ,
                               &_q_value, (quint32)newValue, (_Asm_ldhint)_LDHINT_NONE);
#endif
    return ret == expectedValue;
}
template <typename T>
Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetRelease(T *expectedValue, T *newValue)
{
#ifdef __LP64__
    _Asm_mov_to_ar((_Asm_app_reg)_AREG_CCV, (quint64)expectedValue, FENCE);
    T *ret = (T *)_Asm_cmpxchg((_Asm_sz)_SZ_D, (_Asm_sem)_SEM_REL,
                               &_q_value, (quint64)newValue, (_Asm_ldhint)_LDHINT_NONE);
#else
    _Asm_mov_to_ar((_Asm_app_reg)_AREG_CCV, (quint32)expectedValue, FENCE);
    T *ret = (T *)_Asm_cmpxchg((_Asm_sz)_SZ_W, (_Asm_sem)_SEM_REL,
                               &_q_value, (quint32)newValue, (_Asm_ldhint)_LDHINT_NONE);
#endif
    return ret == expectedValue;
}
template <typename T>
Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddAcquire(qptrdiff valueToAdd)
{
    // loop around a compare-and-exchange
    register T *old, *ret;
    do {
        old = _q_value;
#ifdef __LP64__
        _Asm_mov_to_ar((_Asm_app_reg)_AREG_CCV, (quint64)old, FENCE);
        ret = (T *)_Asm_cmpxchg((_Asm_sz)_SZ_D, (_Asm_sem)_SEM_ACQ,
                                &_q_value, (quint64)(old + valueToAdd),
                                (_Asm_ldhint)_LDHINT_NONE);
#else
        _Asm_mov_to_ar((_Asm_app_reg)_AREG_CCV, (quint32)old, FENCE);
        ret = (T *)_Asm_cmpxchg((_Asm_sz)_SZ_W, (_Asm_sem)_SEM_ACQ,
                                &_q_value, (quint32)(old + valueToAdd),
                                (_Asm_ldhint)_LDHINT_NONE);
#endif
    } while (old != ret);
    return old;
}
template <typename T>
Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddRelease(qptrdiff valueToAdd)
{
    // loop around a compare-and-exchange
    register T *old, *ret;
    do {
        old = _q_value;
#ifdef __LP64__
        _Asm_mov_to_ar((_Asm_app_reg)_AREG_CCV, (quint64)old, FENCE);
        ret = (T *)_Asm_cmpxchg((_Asm_sz)_SZ_D, (_Asm_sem)_SEM_REL,
                                &_q_value, (quint64)(old + valueToAdd),
                                (_Asm_ldhint)_LDHINT_NONE);
#else
        _Asm_mov_to_ar((_Asm_app_reg)_AREG_CCV, (quint32)old, FENCE);
        ret = (T *)_Asm_cmpxchg((_Asm_sz)_SZ_W, (_Asm_sem)_SEM_REL,
                                &_q_value, (quint32)(old + valueToAdd),
                                (_Asm_ldhint)_LDHINT_NONE);
#endif
    } while (old != ret);
    return old;
}
template <typename T>
Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddRelaxed(qptrdiff valueToAdd)
{
    return fetchAndAddAcquire(valueToAdd);
}
// the remaining memory-ordering variants forward to the versions defined above

template <typename T>
    return testAndSetAcquire(expectedValue, newValue);

template <typename T>
    return testAndSetAcquire(expectedValue, newValue);

template <typename T>
    return fetchAndStoreAcquire(newValue);

template <typename T>
    return fetchAndStoreRelaxed(newValue);

template <typename T>
    return fetchAndAddAcquire(valueToAdd);
#endif // QATOMIC_IA64_H