Commit a062bb4b by Andi Kleen

Mark all atomic member functions with memory-model arguments always inline (v2)

When a non-constant memory model is passed to the __atomic_* builtins,
GCC falls back to seq_cst. This drops any HLE acquire or release bits.

This can happen when <atomic> is used with -O0: the member functions
are not always inlined then, so the memory-order argument they pass
down to the builtins ends up being non-constant.
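
For illustration only (not part of this commit): a minimal sketch of the
HLE elision pattern that is affected, assuming an x86 target compiled
with -mhle so that GCC defines the __ATOMIC_HLE_ACQUIRE and
__ATOMIC_HLE_RELEASE modifier bits; the spinlock and function names are
hypothetical.

    #include <atomic>

    std::atomic<int> spin_lock{0};  // hypothetical lock variable

    void lock_hle()
    {
      // The HLE hint is OR'ed into the memory order.  atomic::exchange
      // just forwards its memory-order argument to __atomic_exchange_n;
      // if the member function is not inlined (e.g. at -O0), that
      // argument is no longer a compile-time constant there, and gcc
      // falls back to plain seq_cst, losing the xacquire prefix.
      while (spin_lock.exchange(1, static_cast<std::memory_order>(
                 std::memory_order_acquire | __ATOMIC_HLE_ACQUIRE)))
        ; // spin until the lock is free
    }

    void unlock_hle()
    {
      spin_lock.store(0, static_cast<std::memory_order>(
                     std::memory_order_release | __ATOMIC_HLE_RELEASE));
    }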

v2: Use _GLIBCXX_ALWAYS_INLINE

libstdc++-v3/:

2013-05-08  Andi Kleen  <ak@linux.intel.com>

	PR target/55947
	* include/bits/atomic_base.h
	(_GLIBCXX_ALWAYS_INLINE): Add new macro.
	(atomic_thread_fence, atomic_signal_fence, test_and_set,
	clear, store, load, exchange, compare_exchange_weak,
	compare_exchange_strong, fetch_add, fetch_sub, fetch_and,
	fetch_or, fetch_xor): Mark _GLIBCXX_ALWAYS_INLINE.

From-SVN: r198733
--- a/libstdc++-v3/include/bits/atomic_base.h
+++ b/libstdc++-v3/include/bits/atomic_base.h
@@ -37,6 +37,10 @@
 #include <stdint.h>
 #include <bits/atomic_lockfree_defines.h>
 
+#ifndef _GLIBCXX_ALWAYS_INLINE
+#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((always_inline))
+#endif
+
 namespace std _GLIBCXX_VISIBILITY(default)
 {
 _GLIBCXX_BEGIN_NAMESPACE_VERSION
@@ -94,11 +98,11 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
 		    | (__m & __memory_order_modifier_mask));
   }
 
-  inline void
+  _GLIBCXX_ALWAYS_INLINE void
   atomic_thread_fence(memory_order __m) noexcept
   { __atomic_thread_fence(__m); }
 
-  inline void
+  _GLIBCXX_ALWAYS_INLINE void
   atomic_signal_fence(memory_order __m) noexcept
   { __atomic_signal_fence(__m); }
@@ -281,19 +285,19 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
     : __atomic_flag_base{ _S_init(__i) }
     { }
 
-    bool
+    _GLIBCXX_ALWAYS_INLINE bool
     test_and_set(memory_order __m = memory_order_seq_cst) noexcept
     {
       return __atomic_test_and_set (&_M_i, __m);
     }
 
-    bool
+    _GLIBCXX_ALWAYS_INLINE bool
     test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
     {
       return __atomic_test_and_set (&_M_i, __m);
     }
 
-    void
+    _GLIBCXX_ALWAYS_INLINE void
     clear(memory_order __m = memory_order_seq_cst) noexcept
     {
       memory_order __b = __m & __memory_order_mask;
@@ -304,7 +308,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       __atomic_clear (&_M_i, __m);
     }
 
-    void
+    _GLIBCXX_ALWAYS_INLINE void
     clear(memory_order __m = memory_order_seq_cst) volatile noexcept
     {
       memory_order __b = __m & __memory_order_mask;
@@ -463,7 +467,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }
 
-      void
+      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
	memory_order __b = __m & __memory_order_mask;
@@ -474,7 +478,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
	__atomic_store_n(&_M_i, __i, __m);
      }
 
-      void
+      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
	    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
@@ -486,7 +490,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
	__atomic_store_n(&_M_i, __i, __m);
      }
 
-      __int_type
+      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
	memory_order __b = __m & __memory_order_mask;
@@ -496,7 +500,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
	return __atomic_load_n(&_M_i, __m);
      }
 
-      __int_type
+      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
	memory_order __b = __m & __memory_order_mask;
@@ -506,7 +510,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
	return __atomic_load_n(&_M_i, __m);
      }
 
-      __int_type
+      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
	       memory_order __m = memory_order_seq_cst) noexcept
      {
@@ -514,14 +518,14 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
      }
 
-      __int_type
+      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return __atomic_exchange_n(&_M_i, __i, __m);
      }
 
-      bool
+      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m1, memory_order __m2) noexcept
      {
@@ -534,7 +538,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
	return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }
 
-      bool
+      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m1,
			    memory_order __m2) volatile noexcept
@@ -548,7 +552,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
	return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }
 
-      bool
+      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m = memory_order_seq_cst) noexcept
      {
@@ -556,7 +560,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
				     __cmpexch_failure_order(__m));
      }
 
-      bool
+      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
		   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
@@ -564,7 +568,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
				     __cmpexch_failure_order(__m));
      }
 
-      bool
+      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m1, memory_order __m2) noexcept
      {
@@ -577,7 +581,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
	return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }
 
-      bool
+      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m1,
			      memory_order __m2) volatile noexcept
@@ -592,7 +596,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
	return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }
 
-      bool
+      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m = memory_order_seq_cst) noexcept
      {
@@ -600,7 +604,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
					  __cmpexch_failure_order(__m));
      }
 
-      bool
+      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
		 memory_order __m = memory_order_seq_cst) volatile noexcept
      {
@@ -608,52 +612,52 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
					  __cmpexch_failure_order(__m));
      }
 
-      __int_type
+      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }
 
-      __int_type
+      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }
 
-      __int_type
+      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }
 
-      __int_type
+      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }
 
-      __int_type
+      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }
 
-      __int_type
+      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }
 
-      __int_type
+      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
	       memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }
 
-      __int_type
+      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }
 
-      __int_type
+      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
 
-      __int_type
+      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
@@ -770,7 +774,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(_M_type_size(1), nullptr); }
 
-      void
+      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) noexcept
      {
@@ -783,7 +787,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
	__atomic_store_n(&_M_p, __p, __m);
      }
 
-      void
+      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
@@ -795,7 +799,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
	__atomic_store_n(&_M_p, __p, __m);
      }
 
-      __pointer_type
+      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
	memory_order __b = __m & __memory_order_mask;
@@ -805,7 +809,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
	return __atomic_load_n(&_M_p, __m);
      }
 
-      __pointer_type
+      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
	memory_order __b = __m & __memory_order_mask;
@@ -815,7 +819,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
	return __atomic_load_n(&_M_p, __m);
      }
 
-      __pointer_type
+      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) noexcept
      {
@@ -823,14 +827,14 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
      }
 
-      __pointer_type
+      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return __atomic_exchange_n(&_M_p, __p, __m);
      }
 
-      bool
+      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1,
			      memory_order __m2) noexcept
@@ -844,7 +848,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
	return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
 
-      bool
+      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1,
			      memory_order __m2) volatile noexcept
@@ -859,22 +863,22 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
	return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
 
-      __pointer_type
+      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }
 
-      __pointer_type
+      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }
 
-      __pointer_type
+      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
 
-      __pointer_type
+      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
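
As a sanity check (not from the commit; the file name is illustrative), the
lock_hle()/unlock_hle() sketch above can be compiled at -O0 with an
HLE-capable GCC, e.g. "g++ -std=c++11 -O0 -mhle -S hle.cc", and the generated
assembly inspected for the xacquire/xrelease instruction prefixes. With the
always_inline annotations they should appear even at -O0; before this change
they were silently dropped because the seq_cst fallback discarded the HLE bits.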