Diffstat (limited to 'contrib/llvm/tools/clang/lib/Headers/Intrin.h')
-rw-r--r--  contrib/llvm/tools/clang/lib/Headers/Intrin.h | 154
1 file changed, 73 insertions(+), 81 deletions(-)
diff --git a/contrib/llvm/tools/clang/lib/Headers/Intrin.h b/contrib/llvm/tools/clang/lib/Headers/Intrin.h
index 7ba311e..24b3eae 100644
--- a/contrib/llvm/tools/clang/lib/Headers/Intrin.h
+++ b/contrib/llvm/tools/clang/lib/Headers/Intrin.h
@@ -513,47 +513,40 @@ _BitScanReverse(unsigned long *_Index, unsigned long _Mask) {
return 1;
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
-__popcnt16(unsigned short value) {
- return __builtin_popcount((int)value);
+__popcnt16(unsigned short _Value) {
+ return __builtin_popcount((int)_Value);
}
static __inline__ unsigned int __DEFAULT_FN_ATTRS
-__popcnt(unsigned int value) {
- return __builtin_popcount(value);
+__popcnt(unsigned int _Value) {
+ return __builtin_popcount(_Value);
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
-_bittest(long const *a, long b) {
- return (*a >> b) & 1;
+_bittest(long const *_BitBase, long _BitPos) {
+ return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
-_bittestandcomplement(long *a, long b) {
- unsigned char x = (*a >> b) & 1;
- *a = *a ^ (1 << b);
- return x;
+_bittestandcomplement(long *_BitBase, long _BitPos) {
+ unsigned char _Res = (*_BitBase >> _BitPos) & 1;
+ *_BitBase = *_BitBase ^ (1 << _BitPos);
+ return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
-_bittestandreset(long *a, long b) {
- unsigned char x = (*a >> b) & 1;
- *a = *a & ~(1 << b);
- return x;
+_bittestandreset(long *_BitBase, long _BitPos) {
+ unsigned char _Res = (*_BitBase >> _BitPos) & 1;
+ *_BitBase = *_BitBase & ~(1 << _BitPos);
+ return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
-_bittestandset(long *a, long b) {
- unsigned char x = (*a >> b) & 1;
- *a = *a | (1 << b);
- return x;
+_bittestandset(long *_BitBase, long _BitPos) {
+ unsigned char _Res = (*_BitBase >> _BitPos) & 1;
+ *_BitBase = *_BitBase | (1 << _BitPos);
+ return _Res;
}
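The hunks above rename the parameters from a and b into the implementation-reserved namespace (_BitBase, _BitPos); a system header must use reserved identifiers so that user-defined macros cannot break it. The logic is unchanged: capture the addressed bit, then modify the word. A minimal standalone model of _bittestandset (the names below are illustrative, not part of Intrin.h; like the header, it assumes the bit position is within the 32-bit long these Windows-targeting intrinsics operate on):

#include <stdio.h>

static unsigned char bittestandset_model(long *base, long pos) {
  unsigned char old = (*base >> pos) & 1; /* the bit as it was */
  *base |= 1L << pos;                     /* then set it */
  return old;
}

int main(void) {
  long flags = 0;
  printf("%d\n", bittestandset_model(&flags, 3)); /* 0: bit was clear */
  printf("%d\n", bittestandset_model(&flags, 3)); /* 1: bit is now set */
  return 0;
}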
-#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned char __DEFAULT_FN_ATTRS
-_interlockedbittestandset(long volatile *__BitBase, long __BitPos) {
- unsigned char __Res;
- __asm__ ("xor %0, %0\n"
- "lock bts %2, %1\n"
- "setc %0\n"
- : "=r" (__Res), "+m"(*__BitBase)
- : "Ir"(__BitPos));
- return __Res;
+_interlockedbittestandset(long volatile *_BitBase, long _BitPos) {
+ long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_SEQ_CST);
+ return (_PrevVal >> _BitPos) & 1;
}
-#endif
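This hunk replaces hand-written lock bts assembly with __atomic_fetch_or, which returns the whole word as it was before the OR; the prior bit is then extracted with a shift and mask. Because the builtin is target-independent, the #if defined(__i386__) || defined(__x86_64__) guard around the function is dropped as well. A C11 sketch of the same shape (illustrative names, and like the intrinsic it assumes an in-range bit position):

#include <stdatomic.h>
#include <stdio.h>

static unsigned char interlocked_bts_model(_Atomic long *base, long pos) {
  long prev = atomic_fetch_or_explicit(base, 1L << pos,
                                       memory_order_seq_cst);
  return (prev >> pos) & 1; /* the bit as it was before the OR */
}

int main(void) {
  _Atomic long word = 0;
  printf("%d\n", interlocked_bts_model(&word, 5)); /* 0 */
  printf("%d\n", interlocked_bts_model(&word, 5)); /* 1 */
  return 0;
}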
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask) {
@@ -571,40 +564,36 @@ _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask) {
}
static __inline__
unsigned __int64 __DEFAULT_FN_ATTRS
- __popcnt64(unsigned __int64 value) {
- return __builtin_popcountll(value);
+__popcnt64(unsigned __int64 _Value) {
+ return __builtin_popcountll(_Value);
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
-_bittest64(__int64 const *a, __int64 b) {
- return (*a >> b) & 1;
+_bittest64(__int64 const *_BitBase, __int64 _BitPos) {
+ return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
-_bittestandcomplement64(__int64 *a, __int64 b) {
- unsigned char x = (*a >> b) & 1;
- *a = *a ^ (1ll << b);
- return x;
+_bittestandcomplement64(__int64 *_BitBase, __int64 _BitPos) {
+ unsigned char _Res = (*_BitBase >> _BitPos) & 1;
+ *_BitBase = *_BitBase ^ (1ll << _BitPos);
+ return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
-_bittestandreset64(__int64 *a, __int64 b) {
- unsigned char x = (*a >> b) & 1;
- *a = *a & ~(1ll << b);
- return x;
+_bittestandreset64(__int64 *_BitBase, __int64 _BitPos) {
+ unsigned char _Res = (*_BitBase >> _BitPos) & 1;
+ *_BitBase = *_BitBase & ~(1ll << _BitPos);
+ return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
-_bittestandset64(__int64 *a, __int64 b) {
- unsigned char x = (*a >> b) & 1;
- *a = *a | (1ll << b);
- return x;
+_bittestandset64(__int64 *_BitBase, __int64 _BitPos) {
+ unsigned char _Res = (*_BitBase >> _BitPos) & 1;
+ *_BitBase = *_BitBase | (1ll << _BitPos);
+ return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
-_interlockedbittestandset64(__int64 volatile *__BitBase, __int64 __BitPos) {
- unsigned char __Res;
- __asm__ ("xor %0, %0\n"
- "lock bts %2, %1\n"
- "setc %0\n"
- : "=r" (__Res), "+m"(*__BitBase)
- : "Ir"(__BitPos));
- return __Res;
+_interlockedbittestandset64(__int64 volatile *_BitBase, __int64 _BitPos) {
+ long long _PrevVal =
+ __atomic_fetch_or(_BitBase, 1ll << _BitPos, __ATOMIC_SEQ_CST);
+ return (_PrevVal >> _BitPos) & 1;
}
#endif
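One detail worth noting in the 64-bit variants: the mask is built with 1ll << _BitPos rather than 1 << _BitPos. A bare 1 is a 32-bit int, so the shift would be undefined for positions of 32 and above; promoting the literal keeps the whole shift in 64 bits. A two-line demonstration:

#include <stdio.h>

int main(void) {
  long long ok = 1ll << 40;      /* well-defined 64-bit shift */
  /* long long bad = 1 << 40; */ /* undefined: shifts a 32-bit int by 40 */
  printf("%lld\n", ok);          /* 1099511627776 */
  return 0;
}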
/*----------------------------------------------------------------------------*\
@@ -612,16 +601,16 @@ _interlockedbittestandset64(__int64 volatile *__BitBase, __int64 __BitPos) {
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8(char volatile *_Addend, char _Value) {
- return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
+ return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16(short volatile *_Addend, short _Value) {
- return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
+ return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value) {
- return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
+ return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
}
#endif
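_InterlockedExchangeAdd must return the value the addend held before the addition. The old code obtained that indirectly, as __atomic_add_fetch(...) - _Value (the new value minus the addend); __atomic_fetch_add returns the prior value directly, which states the intent and avoids the extra subtraction. The memory order also changes from the bare literal 0 (__ATOMIC_RELAXED) to __ATOMIC_SEQ_CST, the full-barrier semantics the Interlocked family is documented to have. The C11 equivalent of the new form:

#include <stdatomic.h>
#include <stdio.h>

int main(void) {
  _Atomic short counter = 10;
  short old = atomic_fetch_add_explicit(&counter, 5, memory_order_seq_cst);
  printf("old=%d new=%d\n", old, (int)counter); /* old=10 new=15 */
  return 0;
}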
/*----------------------------------------------------------------------------*\
@@ -629,20 +618,20 @@ _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value) {
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeSub8(char volatile *_Subend, char _Value) {
- return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
+ return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeSub16(short volatile *_Subend, short _Value) {
- return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
+ return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeSub(long volatile *_Subend, long _Value) {
- return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
+ return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value) {
- return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
+ return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
@@ -650,12 +639,12 @@ _InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value) {
\*----------------------------------------------------------------------------*/
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16(short volatile *_Value) {
- return __atomic_add_fetch(_Value, 1, 0);
+ return __atomic_add_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64(__int64 volatile *_Value) {
- return __atomic_add_fetch(_Value, 1, 0);
+ return __atomic_add_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
@@ -663,12 +652,12 @@ _InterlockedIncrement64(__int64 volatile *_Value) {
\*----------------------------------------------------------------------------*/
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16(short volatile *_Value) {
- return __atomic_sub_fetch(_Value, 1, 0);
+ return __atomic_sub_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64(__int64 volatile *_Value) {
- return __atomic_sub_fetch(_Value, 1, 0);
+ return __atomic_sub_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
#endif
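Unlike ExchangeAdd, _InterlockedIncrement and _InterlockedDecrement return the resulting value, so the add_fetch/sub_fetch forms were already the right builtins; these hunks only replace the bare 0 with the named __ATOMIC_SEQ_CST constant. C11 exposes only the fetch-then-op form, so a portable model of Increment adds one to the returned prior value:

#include <stdatomic.h>
#include <stdio.h>

int main(void) {
  _Atomic short v = 7;
  short result = atomic_fetch_add_explicit(&v, 1, memory_order_seq_cst) + 1;
  printf("%d\n", result); /* 8, the value after the increment */
  return 0;
}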
/*----------------------------------------------------------------------------*\
@@ -676,20 +665,20 @@ _InterlockedDecrement64(__int64 volatile *_Value) {
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8(char volatile *_Value, char _Mask) {
- return __atomic_and_fetch(_Value, _Mask, 0);
+ return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16(short volatile *_Value, short _Mask) {
- return __atomic_and_fetch(_Value, _Mask, 0);
+ return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd(long volatile *_Value, long _Mask) {
- return __atomic_and_fetch(_Value, _Mask, 0);
+ return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
- return __atomic_and_fetch(_Value, _Mask, 0);
+ return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#endif
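The And/Or/Xor hunks likewise change only the memory order. Note that they keep the op_fetch builtins, which return the updated value, while Microsoft's documentation describes _InterlockedAnd and friends as returning the original value; if that reading of the contract is right, the discrepancy is pre-existing and out of scope for this memory-order cleanup. The two forms differ whenever the mask clears bits:

#include <stdatomic.h>
#include <stdio.h>

int main(void) {
  _Atomic long v = 0xFF;
  long original = atomic_fetch_and_explicit(&v, 0x0F, memory_order_seq_cst);
  long updated = original & 0x0F; /* what an and_fetch form returns */
  printf("original=%ld updated=%ld\n", original, updated); /* 255 and 15 */
  return 0;
}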
/*----------------------------------------------------------------------------*\
@@ -697,20 +686,20 @@ _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8(char volatile *_Value, char _Mask) {
- return __atomic_or_fetch(_Value, _Mask, 0);
+ return __atomic_or_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16(short volatile *_Value, short _Mask) {
- return __atomic_or_fetch(_Value, _Mask, 0);
+ return __atomic_or_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr(long volatile *_Value, long _Mask) {
- return __atomic_or_fetch(_Value, _Mask, 0);
+ return __atomic_or_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
- return __atomic_or_fetch(_Value, _Mask, 0);
+ return __atomic_or_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
@@ -718,20 +707,20 @@ _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8(char volatile *_Value, char _Mask) {
- return __atomic_xor_fetch(_Value, _Mask, 0);
+ return __atomic_xor_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16(short volatile *_Value, short _Mask) {
- return __atomic_xor_fetch(_Value, _Mask, 0);
+ return __atomic_xor_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor(long volatile *_Value, long _Mask) {
- return __atomic_xor_fetch(_Value, _Mask, 0);
+ return __atomic_xor_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
- return __atomic_xor_fetch(_Value, _Mask, 0);
+ return __atomic_xor_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
@@ -739,18 +728,18 @@ _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8(char volatile *_Target, char _Value) {
- __atomic_exchange(_Target, &_Value, &_Value, 0);
+ __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16(short volatile *_Target, short _Value) {
- __atomic_exchange(_Target, &_Value, &_Value, 0);
+ __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
return _Value;
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64(__int64 volatile *_Target, __int64 _Value) {
- __atomic_exchange(_Target, &_Value, &_Value, 0);
+ __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
return _Value;
}
#endif
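The generic __atomic_exchange builtin writes the old contents through its third pointer. Passing &_Value as both the value to store and the result slot is deliberate: after the call, _Value holds the previous contents of _Target, which is exactly what the intrinsic returns. C11's atomic_exchange makes the same round trip explicit:

#include <stdatomic.h>
#include <stdio.h>

int main(void) {
  _Atomic char target = 'a';
  char prev = atomic_exchange_explicit(&target, 'b', memory_order_seq_cst);
  printf("prev=%c now=%c\n", prev, (char)target); /* prev=a now=b */
  return 0;
}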
@@ -760,19 +749,22 @@ _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value) {
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8(char volatile *_Destination,
char _Exchange, char _Comparand) {
- __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
+ __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
+ __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16(short volatile *_Destination,
short _Exchange, short _Comparand) {
- __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
+ __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
+ __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64(__int64 volatile *_Destination,
__int64 _Exchange, __int64 _Comparand) {
- __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
+ __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
+ __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
return _Comparand;
}
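The last two arguments of __atomic_compare_exchange are the success and failure memory orders; the old code passed the literal 0 (relaxed) for both, and the new code spells out __ATOMIC_SEQ_CST. On failure the builtin stores the observed value back into _Comparand, and on success _Comparand still holds what it was compared against, so returning _Comparand yields the destination's initial value either way, as the MSVC contract requires. A typical CAS loop built on that write-back behavior:

#include <stdatomic.h>
#include <stdio.h>

int main(void) {
  _Atomic long value = 1;
  long expected = atomic_load(&value);
  /* Atomically double value, retrying if another thread changed it;
     on failure, expected is refreshed with the observed value. */
  while (!atomic_compare_exchange_strong_explicit(
             &value, &expected, expected * 2,
             memory_order_seq_cst, memory_order_seq_cst))
    ;
  printf("%ld\n", (long)value); /* 2 */
  return 0;
}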
/*----------------------------------------------------------------------------*\