/* NOTE(review): the leading integers on every line ("42", "43", ...) look like
 * source line numbers fused in by a garbled extraction, not original tokens —
 * the real header almost certainly reads "#ifndef QATOMIC_S390_H" etc.
 * Confirm against the upstream file; as written these lines do not compile. */
42 #ifndef QATOMIC_S390_H
43 #define QATOMIC_S390_H
/* Feature flags consumed by Qt's generic atomic framework: they advertise that
 * s390 provides native (lock-free) implementations of these int/pointer
 * operations, so the portable fallback paths are not used for them. */
49 #define Q_ATOMIC_INT_REFERENCE_COUNTING_IS_ALWAYS_NATIVE
56 #define Q_ATOMIC_INT_TEST_AND_SET_IS_ALWAYS_NATIVE
63 #define Q_ATOMIC_INT_FETCH_AND_STORE_IS_ALWAYS_NATIVE
70 #define Q_ATOMIC_INT_FETCH_AND_ADD_IS_ALWAYS_NATIVE
77 #define Q_ATOMIC_POINTER_TEST_AND_SET_IS_ALWAYS_NATIVE
86 #define Q_ATOMIC_POINTER_FETCH_AND_STORE_IS_ALWAYS_NATIVE
95 #define Q_ATOMIC_POINTER_FETCH_AND_ADD_IS_ALWAYS_NATIVE
/* NOTE(review): dangling template header — the class or function it introduces
 * is missing from this extraction. */
100 template <
typename T>
/* Two alternative definitions of __GNU_EXTENSION: under GNU compilers it maps
 * to __extension__ (suppresses pedantic diagnostics for the GCC statement
 * expressions used by the __CS_*LOOP macros below); otherwise it expands to
 * nothing.  The #if defined(__GNUC__)/#else/#endif that selects between the
 * two arms is missing from this extraction — as written, the second #define
 * would be an illegal redefinition.  TODO confirm against the original file. */
105 #define __GNU_EXTENSION __extension__
107 #define __GNU_EXTENSION
/* __CS_LOOP: 32-bit atomic read-modify-write loop built on the z/Architecture
 * CS (Compare and Swap) instruction.  Visible structure: op_string (e.g. an
 * arithmetic or load mnemonic) computes new_val from old_val and op_val, then
 * "cs %0,%1,0(%3)" stores new_val at *ptr only if *ptr still equals old_val.
 * NOTE(review): this extraction is missing interior lines (the numbering jumps
 * 112->115 and 116->119): presumably the initial load of old_val, the loop
 * label, the conditional branch back on CS failure, the "=m"/"m" memory
 * operands, the use of the `pre`/`post` barrier arguments, and the closing
 * "new_val; })" of the statement expression — TODO recover from upstream.
 * `pre`/`post` presumably inject serialization ("bcr 15,0") for acquire/
 * release variants — confirm. */
110 #define __CS_LOOP(ptr, op_val, op_string, pre, post) __GNU_EXTENSION ({ \
111 volatile int old_val, new_val; \
112 __asm__ __volatile__(pre \
115 op_string " %1,%4\n" \
116 " cs %0,%1,0(%3)\n" \
119 : "=&d" (old_val), "=&d" (new_val), \
121 : "a" (ptr), "d" (op_val), \
123 : "cc", "memory" ); \
/* __CS_OLD_LOOP: same 32-bit CS loop as __CS_LOOP, but — judging by the name
 * and by how callers cast its result — the statement expression presumably
 * evaluates to old_val (the value observed *before* the swap) rather than
 * new_val.  The final value line and the closing "})" are missing from this
 * extraction, as are the same interior lines noted on __CS_LOOP — TODO
 * confirm against the upstream header. */
127 #define __CS_OLD_LOOP(ptr, op_val, op_string, pre, post ) __GNU_EXTENSION ({ \
128 volatile int old_val, new_val; \
129 __asm__ __volatile__(pre \
132 op_string " %1,%4\n" \
133 " cs %0,%1,0(%3)\n" \
136 : "=&d" (old_val), "=&d" (new_val), \
138 : "a" (ptr), "d" (op_val), \
140 : "cc", "memory" ); \
/* __CSG_OLD_LOOP: 64-bit counterpart of __CS_OLD_LOOP for s390x — operates on
 * `long` and uses CSG (Compare and Swap Grande, 64-bit) instead of CS.
 * Presumably evaluates to the pre-swap old_val; the trailing lines of the
 * statement expression are missing from this extraction, as are the same
 * interior lines noted on __CS_LOOP — TODO confirm against upstream. */
145 #define __CSG_OLD_LOOP(ptr, op_val, op_string, pre, post) __GNU_EXTENSION ({ \
146 long old_val, new_val; \
147 __asm__ __volatile__(pre \
150 op_string " %1,%4\n" \
151 " csg %0,%1,0(%3)\n" \
154 : "=&d" (old_val), "=&d" (new_val), \
156 : "a" (ptr), "d" (op_val), \
158 : "cc", "memory" ); \
/* NOTE(review): three orphan inline-asm fragments.  The enclosing function
 * signatures, the asm template strings, and the output-operand lists are all
 * missing from this extraction.  The visible input operands — "a" (address of
 * _q_value), "d" (expectedValue), "d" (newValue) — match CS-based
 * test-and-set implementations on an atomic int's _q_value.  Given the
 * original line numbers (176/191/207) and the declaration dump at the bottom
 * of this extraction, these are presumably testAndSetOrdered /
 * testAndSetAcquire / testAndSetRelease for int — TODO confirm upstream. */
/* fragment 1: presumably testAndSetOrdered(int, int) */
176 __asm__ __volatile__(
183 :
"a" (&
_q_value),
"d" (expectedValue) ,
"d" (newValue),
/* fragment 2: presumably testAndSetAcquire(int, int) */
191 __asm__ __volatile__(
199 :
"a" (&
_q_value),
"d" (expectedValue) ,
"d" (newValue),
/* fragment 3: presumably testAndSetRelease(int, int) */
207 __asm__ __volatile__(
215 :
"a" (&
_q_value),
"d" (expectedValue) ,
"d" (newValue),
/* NOTE(review): pointer test-and-set — presumably
 * QBasicAtomicPointer<T>::testAndSetOrdered (original line 265), with two asm
 * bodies that are most likely the 31-bit (CS) and 64-bit s390x (CSG) arms of a
 * missing #ifdef __s390x__ — confirm upstream.  The asm template strings
 * themselves are missing; only the operand lists survive: outputs retval and
 * the _q_value memory slot, inputs its address / expectedValue / newValue /
 * prior memory value, clobbering "cc" and "memory" (the "memory" clobber is
 * what provides the compiler-level ordering fence). */
265 template <
typename T>
271 __asm__ __volatile__(
277 :
"=&d" (retval),
"=m" (_q_value)
278 :
"a" (&_q_value),
"d" (expectedValue) ,
"d" (newValue),
279 "m" (_q_value) :
"cc",
"memory" );
/* second arm — presumably the 64-bit CSG variant */
281 __asm__ __volatile__(
287 :
"=&d" (retval),
"=m" (_q_value)
288 :
"a" (&_q_value),
"d" (expectedValue) ,
"d" (newValue),
289 "m" (_q_value) :
"cc",
"memory" );
/* NOTE(review): presumably QBasicAtomicPointer<T>::testAndSetAcquire
 * (original line 295), again with 31-bit / 64-bit asm arms of a missing
 * #ifdef __s390x__.  The asm template strings (which would contain any
 * "bcr 15,0" serialization giving acquire semantics) are missing from this
 * extraction, so the ordering guarantee cannot be verified here — TODO
 * confirm against the upstream header. */
295 template <
typename T>
301 __asm__ __volatile__(
308 :
"=&d" (retval),
"=m" (_q_value)
309 :
"a" (&_q_value),
"d" (expectedValue) ,
"d" (newValue),
310 "m" (_q_value) :
"cc",
"memory" );
/* second arm — presumably the 64-bit CSG variant */
312 __asm__ __volatile__(
319 :
"=&d" (retval),
"=m" (_q_value)
320 :
"a" (&_q_value),
"d" (expectedValue) ,
"d" (newValue),
321 "m" (_q_value) :
"cc",
"memory" );
/* NOTE(review): presumably QBasicAtomicPointer<T>::testAndSetRelease
 * (original line 327); same missing-#ifdef / missing-asm-template caveats as
 * the Ordered and Acquire variants above — confirm upstream. */
327 template <
typename T>
333 __asm__ __volatile__(
340 :
"=&d" (retval),
"=m" (_q_value)
341 :
"a" (&_q_value),
"d" (expectedValue) ,
"d" (newValue),
342 "m" (_q_value) :
"cc",
"memory" );
/* second arm — presumably the 64-bit CSG variant */
344 __asm__ __volatile__(
351 :
"=&d" (retval),
"=m" (_q_value)
352 :
"a" (&_q_value),
"d" (expectedValue) ,
"d" (newValue),
353 "m" (_q_value) :
"cc",
"memory" );
/* NOTE(review): the function signature is missing; per the original line
 * number (359) and the declaration dump below, this is presumably
 * testAndSetRelaxed(T*, T*), implemented by delegating to the (stronger)
 * acquire variant — a common pattern on strongly-ordered targets.  Confirm. */
359 template <
typename T>
362 return testAndSetAcquire(expectedValue, newValue);
/* NOTE(review): pointer fetch-and-store with empty `pre`/`post` barrier
 * arguments (no "bcr 15,0" serialization), so presumably the Relaxed variant
 * — confirm upstream.  Two arms of a missing #ifdef __s390x__: 32-bit
 * __CS_OLD_LOOP with "lr" (load register) vs 64-bit __CSG_OLD_LOOP with
 * "lgr"; both unconditionally replace _q_value with newValue and return the
 * previous value (cast back to T*). */
365 template <
typename T>
369 return (T*)
__CS_OLD_LOOP(&_q_value, (
int)newValue,
"lr",
"",
"");
371 return (T*)__CSG_OLD_LOOP(&_q_value, (
long)newValue,
"lgr",
"",
"");
/* NOTE(review): pointer fetch-and-store passing "bcr 15,0" (full serialization
 * / memory barrier on s390) as the `post` argument, i.e. the fence is emitted
 * after the CS/CSG loop.  Per the original line number (375) this is
 * presumably fetchAndStoreAcquire — confirm upstream.  32-bit and 64-bit arms
 * of a missing #ifdef __s390x__ as in the variant above. */
375 template <
typename T>
379 return (T*)
__CS_OLD_LOOP(&_q_value, (
int)newValue,
"lr",
"",
"bcr 15,0 \n");
381 return (T*)__CSG_OLD_LOOP(&_q_value, (
long)newValue,
"lgr",
"",
"bcr 15,0 \n");
/* NOTE(review): pointer fetch-and-store passing "bcr 15,0" as the `pre`
 * argument, i.e. the serialization fence precedes the CS/CSG loop.  Per the
 * original line number (385) this is presumably fetchAndStoreRelease —
 * confirm upstream.  Note the inconsistent spacing of the barrier string
 * between the two arms ("bcr 15,0 \n" vs "bcr 15,0\n") — harmless to the
 * assembler, but worth normalizing upstream. */
385 template <
typename T>
389 return (T*)
__CS_OLD_LOOP(&_q_value, (
int)newValue,
"lr",
"bcr 15,0 \n",
"");
391 return (T*)__CSG_OLD_LOOP(&_q_value, (
long)newValue,
"lgr",
"bcr 15,0\n",
"");
/* NOTE(review): signature missing; per the original line number (395),
 * presumably fetchAndStoreOrdered(T*), delegating to the acquire variant.
 * Delegating Ordered to Acquire is only correct if the acquire variant's
 * barrier placement actually yields full ordering on this target — the asm
 * is not fully visible here, so confirm against upstream. */
395 template <
typename T>
398 return fetchAndStoreAcquire(newValue);
/* NOTE(review): three fetch-and-add variants (per original line numbers
 * 402/408/414: presumably Relaxed, Acquire, Release — confirm) all delegating
 * to fetchAndAddOrdered; weaker orderings are satisfied by the fully-ordered
 * implementation, at a potential cost in fence overhead. */
402 template <
typename T>
405 return fetchAndAddOrdered(valueToAdd);
408 template <
typename T>
411 return fetchAndAddOrdered(valueToAdd);
414 template <
typename T>
417 return fetchAndAddOrdered(valueToAdd);
/* Clean up the helper macro and close the include guard. */
420 #undef __GNU_EXTENSION
426 #endif // QATOMIC_S390_H
/* NOTE(review): everything below appears to be extraction residue, not code
 * that belongs at this position in the file — it is a dump of member-function
 * signatures and macro names (some duplicated, e.g. isFetchAndAddNative,
 * isTestAndSetNative) that in the real header are the declarations/definition
 * headers of the QBasicAtomicInt and QBasicAtomicPointer<T> members whose
 * bodies appear (fragmented) above.  It sits after #endif and is not valid
 * C++ as placed here.  Do not compile as-is; reconcile against the upstream
 * qatomic_s390.h instead. */
int fetchAndStoreRelaxed(int newValue)
bool testAndSetRelease(int expectedValue, int newValue)
static bool isFetchAndAddNative()
T * fetchAndAddAcquire(qptrdiff valueToAdd)
static bool isReferenceCountingNative()
int fetchAndAddRelaxed(int valueToAdd)
T * fetchAndStoreAcquire(T *newValue)
T * fetchAndAddRelease(qptrdiff valueToAdd)
int fetchAndAddAcquire(int valueToAdd)
static bool isReferenceCountingWaitFree()
T * fetchAndAddRelaxed(qptrdiff valueToAdd)
static bool isFetchAndAddNative()
bool testAndSetOrdered(T *expectedValue, T *newValue)
bool testAndSetAcquire(int expectedValue, int newValue)
#define __CS_OLD_LOOP(ptr, op_val, op_string, pre, post)
static bool isFetchAndStoreNative()
#define QT_BEGIN_NAMESPACE
static bool isTestAndSetNative()
static bool isTestAndSetNative()
static bool isFetchAndStoreNative()
T * fetchAndStoreRelaxed(T *newValue)
static bool isFetchAndAddWaitFree()
bool testAndSetRelaxed(int expectedValue, int newValue)
T * fetchAndStoreRelease(T *newValue)
bool testAndSetRelease(T *expectedValue, T *newValue)
#define __CS_LOOP(ptr, op_val, op_string, pre, post)
static bool isFetchAndStoreWaitFree()
static bool isTestAndSetWaitFree()
bool testAndSetAcquire(T *expectedValue, T *newValue)
static bool isFetchAndAddWaitFree()
T * fetchAndStoreOrdered(T *newValue)
static bool isFetchAndStoreWaitFree()
bool testAndSetRelaxed(T *expectedValue, T *newValue)
int fetchAndStoreAcquire(int newValue)
static bool isTestAndSetWaitFree()
bool testAndSetOrdered(int expectedValue, int newValue)
int fetchAndAddOrdered(int valueToAdd)
int fetchAndStoreOrdered(int newValue)
int fetchAndAddRelease(int valueToAdd)
int fetchAndStoreRelease(int newValue)