Commit b93d5ca9 by Benjamin Kosnik, committed by Benjamin Kosnik

gnu.ver: Spell out exact signatures for atomic access functions.

2006-08-30  Benjamin Kosnik  <bkoz@redhat.com>
            Richard Guenther  <rguenther@suse.de>
	
	* config/abi/pre/gnu.ver: Spell out exact signatures for atomic
	access functions.

	* include/bits/atomicity.h (__atomic_add_dispatch): Remove
	volatile qualification for _Atomic_word argument.
	(__atomic_add_single): Same.
	(__exchange_and_add_dispatch): Same.
	(__exchange_and_add_single): Same.
	

Co-Authored-By: Richard Guenther <rguenther@suse.de>

From-SVN: r116584
parent c87555d6
ChangeLog
+2006-08-30  Benjamin Kosnik  <bkoz@redhat.com>
+            Richard Guenther  <rguenther@suse.de>
+
+	* config/abi/pre/gnu.ver: Spell out exact signatures for atomic
+	access functions.
+
+	* include/bits/atomicity.h (__atomic_add_dispatch): Remove
+	volatile qualification for _Atomic_word argument.
+	(__atomic_add_single): Same.
+	(__exchange_and_add_dispatch): Same.
+	(__exchange_and_add_single): Same.
+
 2006-08-29  Paolo Carlini  <pcarlini@suse.de>
 
 	* include/tr1/random (subtract_with_carry_01<>::_M_initialize_npows):
config/abi/pre/gnu.ver
@@ -378,8 +378,8 @@ GLIBCXX_3.4 {
 
     # __gnu_cxx::__atomic_add
     # __gnu_cxx::__exchange_and_add
-    _ZN9__gnu_cxx12__atomic_add*;
-    _ZN9__gnu_cxx18__exchange_and_add*;
+    _ZN9__gnu_cxx12__atomic_addEPVii;
+    _ZN9__gnu_cxx18__exchange_and_addEPVii;
 
     # debug mode
     _ZN10__gnu_norm15_List_node_base4hook*;
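For reference, a small standalone program (not part of this commit) can check what the two now-explicit export names encode; it assumes a GCC-compatible toolchain where abi::__cxa_demangle from <cxxabi.h> is available.

#include <cxxabi.h>
#include <cstdio>
#include <cstdlib>

int main()
{
  const char* names[] = { "_ZN9__gnu_cxx12__atomic_addEPVii",
                          "_ZN9__gnu_cxx18__exchange_and_addEPVii" };
  for (unsigned i = 0; i < sizeof(names) / sizeof(names[0]); ++i)
    {
      int status = 0;
      // abi::__cxa_demangle returns a malloc'd string on success (status == 0).
      char* readable = abi::__cxa_demangle(names[i], 0, 0, &status);
      std::printf("%-42s -> %s\n", names[i],
                  status == 0 ? readable : "(demangling failed)");
      std::free(readable);
    }
  return 0;
}

On a typical GNU toolchain this should print __gnu_cxx::__atomic_add(int volatile*, int) and __gnu_cxx::__exchange_and_add(int volatile*, int), i.e. the exported, out-of-line functions keep the volatile-qualified _Atomic_word pointer; only the inline _single and _dispatch helpers below drop it.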
include/bits/atomicity.h
@@ -60,7 +60,7 @@ _GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx)
 #endif
 
   static inline _Atomic_word
-  __exchange_and_add_single(volatile _Atomic_word* __mem, int __val)
+  __exchange_and_add_single(_Atomic_word* __mem, int __val)
   {
     _Atomic_word __result = *__mem;
     *__mem += __val;
@@ -68,12 +68,12 @@ _GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx)
   }
 
   static inline void
-  __atomic_add_single(volatile _Atomic_word* __mem, int __val)
+  __atomic_add_single(_Atomic_word* __mem, int __val)
   { *__mem += __val; }
 
   static inline _Atomic_word
   __attribute__ ((__unused__))
-  __exchange_and_add_dispatch(volatile _Atomic_word* __mem, int __val)
+  __exchange_and_add_dispatch(_Atomic_word* __mem, int __val)
   {
 #ifdef __GTHREADS
     if (__gthread_active_p())
@@ -87,7 +87,7 @@ _GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx)
 
   static inline void
   __attribute__ ((__unused__))
-  __atomic_add_dispatch(volatile _Atomic_word* __mem, int __val)
+  __atomic_add_dispatch(_Atomic_word* __mem, int __val)
   {
 #ifdef __GTHREADS
     if (__gthread_active_p())
@@ -101,8 +101,9 @@ _GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx)
 
 _GLIBCXX_END_NAMESPACE
 
-// Even if the CPU doesn't need a memory barrier, we need to ensure that
-// the compiler doesn't reorder memory accesses across the barriers.
+// Even if the CPU doesn't need a memory barrier, we need to ensure
+// that the compiler doesn't reorder memory accesses across the
+// barriers.
 #ifndef _GLIBCXX_READ_MEM_BARRIER
 #define _GLIBCXX_READ_MEM_BARRIER __asm __volatile ("":::"memory")
 #endif
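As a side note, a minimal standalone sketch (an illustration, not libstdc++ code) of why the _single variants touched above need no volatile qualification on the parameter: they are selected only when a single thread can reach the counter, so ordinary loads and stores on the _Atomic_word are enough. All names below are hypothetical stand-ins.

#include <cstdio>

typedef int Atomic_word_sketch;   // stand-in for the real _Atomic_word typedef

// Mirrors the single-threaded fallback pattern: no other thread can touch
// *mem here, so a plain read-modify-write suffices and the pointer need
// not be volatile-qualified.
static inline Atomic_word_sketch
exchange_and_add_single_sketch(Atomic_word_sketch* mem, int val)
{
  Atomic_word_sketch result = *mem;
  *mem += val;
  return result;
}

int main()
{
  Atomic_word_sketch refcount = 1;
  // The helper returns the value held before the update.
  int old_value = exchange_and_add_single_sketch(&refcount, -1);
  std::printf("before: %d  after: %d\n", old_value, refcount);
  return 0;
}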