Subject: [Boost-commit] svn:boost r84400 - in branches/release: boost/atomic boost/atomic/detail libs/atomic libs/atomic/test
From: tim_at_[hidden]
Date: 2013-05-21 05:54:10
Author: timblechmann
Date: 2013-05-21 05:54:09 EDT (Tue, 21 May 2013)
New Revision: 84400
URL: http://svn.boost.org/trac/boost/changeset/84400
Log:
atomic: merge fixes from trunk
Properties modified:
branches/release/boost/atomic/ (props changed)
branches/release/libs/atomic/ (props changed)
Text files modified:
branches/release/boost/atomic/detail/base.hpp | 67 ++++++++++++++++++
branches/release/boost/atomic/detail/cas32strong.hpp | 6
branches/release/boost/atomic/detail/cas32weak.hpp | 20 +++++
branches/release/boost/atomic/detail/cas64strong.hpp | 6
branches/release/boost/atomic/detail/gcc-ppc.hpp | 78 +++++++++++++++++++++
branches/release/boost/atomic/detail/gcc-sparcv9.hpp | 20 +++++
branches/release/boost/atomic/detail/gcc-x86.hpp | 139 +++++++++++++++++++++++++++++++++++----
branches/release/boost/atomic/detail/lockpool.hpp | 1
branches/release/boost/atomic/detail/windows.hpp | 18 ++++
branches/release/libs/atomic/test/api_test_helpers.hpp | 35 ++++++----
branches/release/libs/atomic/test/lockfree.cpp | 6 +
11 files changed, 352 insertions(+), 44 deletions(-)
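
The user-visible effect of the merged changes is byte-wise arithmetic on boost::atomic<void*>: fetch_add/fetch_sub take a ptrdiff_t offset, and the derived operators (++, --, +=, -=) are now declared via BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS. A minimal usage sketch (illustrative only, assuming a Boost.Atomic build that contains this revision):

#include <boost/atomic.hpp>
#include <cassert>

int main()
{
    char buffer[16] = {};
    boost::atomic<void*> p(&buffer[4]);

    // Offsets are interpreted in bytes, matching the (char*) arithmetic
    // used by the new fetch_add/fetch_sub overloads.
    void* old = p.fetch_add(8);      // returns the previous value
    assert(old == &buffer[4]);
    assert(p.load() == &buffer[12]);

    p -= 4;                          // operator-= now available for void*
    assert(p.load() == &buffer[8]);

    ++p;                             // advances by a single byte
    assert(p.load() == &buffer[9]);

    return 0;
}
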
Modified: branches/release/boost/atomic/detail/base.hpp
==============================================================================
--- branches/release/boost/atomic/detail/base.hpp (original)
+++ branches/release/boost/atomic/detail/base.hpp 2013-05-21 05:54:09 EDT (Tue, 21 May 2013)
@@ -79,6 +79,43 @@
return fetch_sub(v) - v; \
} \
+#define BOOST_ATOMIC_DECLARE_VOID_POINTER_ADDITIVE_OPERATORS \
+ value_type \
+ operator++(int) volatile BOOST_NOEXCEPT \
+ { \
+ return fetch_add(1); \
+ } \
+ \
+ value_type \
+ operator++(void) volatile BOOST_NOEXCEPT \
+ { \
+ return (char*)fetch_add(1) + 1; \
+ } \
+ \
+ value_type \
+ operator--(int) volatile BOOST_NOEXCEPT \
+ { \
+ return fetch_sub(1); \
+ } \
+ \
+ value_type \
+ operator--(void) volatile BOOST_NOEXCEPT \
+ { \
+ return (char*)fetch_sub(1) - 1; \
+ } \
+ \
+ value_type \
+ operator+=(difference_type v) volatile BOOST_NOEXCEPT \
+ { \
+ return (char*)fetch_add(v) + v; \
+ } \
+ \
+ value_type \
+ operator-=(difference_type v) volatile BOOST_NOEXCEPT \
+ { \
+ return (char*)fetch_sub(v) - v; \
+ } \
+
#define BOOST_ATOMIC_DECLARE_BIT_OPERATORS \
value_type \
operator&=(difference_type v) volatile BOOST_NOEXCEPT \
@@ -102,6 +139,10 @@
BOOST_ATOMIC_DECLARE_BASE_OPERATORS \
BOOST_ATOMIC_DECLARE_ADDITIVE_OPERATORS \
+#define BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS \
+ BOOST_ATOMIC_DECLARE_BASE_OPERATORS \
+ BOOST_ATOMIC_DECLARE_VOID_POINTER_ADDITIVE_OPERATORS \
+
#define BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS \
BOOST_ATOMIC_DECLARE_BASE_OPERATORS \
BOOST_ATOMIC_DECLARE_ADDITIVE_OPERATORS \
@@ -444,6 +485,7 @@
{
private:
typedef base_atomic this_type;
+ typedef ptrdiff_t difference_type;
typedef void * value_type;
typedef lockpool::scoped_lock guard_type;
public:
@@ -506,7 +548,30 @@
return false;
}
- BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+ value_type fetch_add(difference_type v, memory_order /*order*/ = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ guard_type guard(const_cast<value_type *>(&v_));
+
+ value_type old = v_;
+ char * cv = reinterpret_cast<char*>(old);
+ cv += v;
+ v_ = cv;
+ return old;
+ }
+
+ value_type fetch_sub(difference_type v, memory_order /*order*/ = memory_order_seq_cst) volatile
+ {
+ guard_type guard(const_cast<value_type *>(&v_));
+
+ value_type old = v_;
+ char * cv = reinterpret_cast<char*>(old);
+ cv -= v;
+ v_ = cv;
+ return old;
+ }
+
+ BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
+
private:
base_atomic(const base_atomic &) /* = delete */ ;
void operator=(const base_atomic &) /* = delete */ ;
Modified: branches/release/boost/atomic/detail/cas32strong.hpp
==============================================================================
--- branches/release/boost/atomic/detail/cas32strong.hpp (original)
+++ branches/release/boost/atomic/detail/cas32strong.hpp 2013-05-21 05:54:09 EDT (Tue, 21 May 2013)
@@ -479,7 +479,7 @@
{
value_type original = load(memory_order_relaxed);
do {
- } while (!compare_exchange_weak(original, original + v, order, memory_order_relaxed));
+ } while (!compare_exchange_weak(original, (char*)original + v, order, memory_order_relaxed));
return original;
}
@@ -488,7 +488,7 @@
{
value_type original = load(memory_order_relaxed);
do {
- } while (!compare_exchange_weak(original, original - v, order, memory_order_relaxed));
+ } while (!compare_exchange_weak(original, (char*)original - v, order, memory_order_relaxed));
return original;
}
@@ -498,7 +498,7 @@
return true;
}
- BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+ BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
private:
base_atomic(const base_atomic &) /* = delete */ ;
void operator=(const base_atomic &) /* = delete */ ;
Modified: branches/release/boost/atomic/detail/cas32weak.hpp
==============================================================================
--- branches/release/boost/atomic/detail/cas32weak.hpp (original)
+++ branches/release/boost/atomic/detail/cas32weak.hpp 2013-05-21 05:54:09 EDT (Tue, 21 May 2013)
@@ -509,7 +509,25 @@
return true;
}
- BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+ value_type
+ fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original = load(memory_order_relaxed);
+ do {
+ } while (!compare_exchange_weak(original, (char*)original + v, order, memory_order_relaxed));
+ return original;
+ }
+
+ value_type
+ fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original = load(memory_order_relaxed);
+ do {
+ } while (!compare_exchange_weak(original, (char*)original - v, order, memory_order_relaxed));
+ return original;
+ }
+
+ BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
private:
base_atomic(const base_atomic &) /* = delete */ ;
void operator=(const base_atomic &) /* = delete */ ;
Modified: branches/release/boost/atomic/detail/cas64strong.hpp
==============================================================================
--- branches/release/boost/atomic/detail/cas64strong.hpp (original)
+++ branches/release/boost/atomic/detail/cas64strong.hpp 2013-05-21 05:54:09 EDT (Tue, 21 May 2013)
@@ -223,7 +223,7 @@
{
value_type original = load(memory_order_relaxed);
do {
- } while (!compare_exchange_weak(original, original + v, order, memory_order_relaxed));
+ } while (!compare_exchange_weak(original, (char*)original + v, order, memory_order_relaxed));
return original;
}
@@ -232,7 +232,7 @@
{
value_type original = load(memory_order_relaxed);
do {
- } while (!compare_exchange_weak(original, original - v, order, memory_order_relaxed));
+ } while (!compare_exchange_weak(original, (char*)original - v, order, memory_order_relaxed));
return original;
}
@@ -242,7 +242,7 @@
return true;
}
- BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+ BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
private:
base_atomic(const base_atomic &) /* = delete */ ;
void operator=(const base_atomic &) /* = delete */ ;
Modified: branches/release/boost/atomic/detail/gcc-ppc.hpp
==============================================================================
--- branches/release/boost/atomic/detail/gcc-ppc.hpp (original)
+++ branches/release/boost/atomic/detail/gcc-ppc.hpp 2013-05-21 05:54:09 EDT (Tue, 21 May 2013)
@@ -1525,6 +1525,7 @@
class base_atomic<void *, void *, 4, Sign>
{
typedef base_atomic this_type;
+ typedef ptrdiff_t difference_type;
typedef void * value_type;
public:
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
@@ -1643,7 +1644,43 @@
return true;
}
- BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+ value_type
+ fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original, tmp;
+ ppc_fence_before(order);
+ __asm__ (
+ "1:\n"
+ "lwarx %0,%y2\n"
+ "add %1,%0,%3\n"
+ "stwcx. %1,%y2\n"
+ "bne- 1b\n"
+ : "=&b" (original), "=&b" (tmp), "+Z"(v_)
+ : "b" (v)
+ : "cc");
+ ppc_fence_after(order);
+ return original;
+ }
+
+ value_type
+ fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original, tmp;
+ ppc_fence_before(order);
+ __asm__ (
+ "1:\n"
+ "lwarx %0,%y2\n"
+ "sub %1,%0,%3\n"
+ "stwcx. %1,%y2\n"
+ "bne- 1b\n"
+ : "=&b" (original), "=&b" (tmp), "+Z"(v_)
+ : "b" (v)
+ : "cc");
+ ppc_fence_after(order);
+ return original;
+ }
+
+ BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
private:
base_atomic(const base_atomic &) /* = delete */ ;
void operator=(const base_atomic &) /* = delete */ ;
@@ -1824,6 +1861,7 @@
class base_atomic<void *, void *, 8, Sign>
{
typedef base_atomic this_type;
+ typedef ptrdiff_t difference_type;
typedef void * value_type;
public:
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
@@ -1942,7 +1980,43 @@
return true;
}
- BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+ value_type
+ fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original, tmp;
+ ppc_fence_before(order);
+ __asm__ (
+ "1:\n"
+ "ldarx %0,%y2\n"
+ "add %1,%0,%3\n"
+ "stdcx. %1,%y2\n"
+ "bne- 1b\n"
+ : "=&b" (original), "=&b" (tmp), "+Z"(v_)
+ : "b" (v)
+ : "cc");
+ ppc_fence_after(order);
+ return original;
+ }
+
+ value_type
+ fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original, tmp;
+ ppc_fence_before(order);
+ __asm__ (
+ "1:\n"
+ "ldarx %0,%y2\n"
+ "sub %1,%0,%3\n"
+ "stdcx. %1,%y2\n"
+ "bne- 1b\n"
+ : "=&b" (original), "=&b" (tmp), "+Z"(v_)
+ : "b" (v)
+ : "cc");
+ ppc_fence_after(order);
+ return original;
+ }
+
+ BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
private:
base_atomic(const base_atomic &) /* = delete */ ;
void operator=(const base_atomic &) /* = delete */ ;
Modified: branches/release/boost/atomic/detail/gcc-sparcv9.hpp
==============================================================================
--- branches/release/boost/atomic/detail/gcc-sparcv9.hpp (original)
+++ branches/release/boost/atomic/detail/gcc-sparcv9.hpp 2013-05-21 05:54:09 EDT (Tue, 21 May 2013)
@@ -792,6 +792,7 @@
class base_atomic<void *, void *, 4, Sign>
{
typedef base_atomic this_type;
+ typedef ptrdiff_t difference_type;
typedef void * value_type;
public:
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
@@ -857,7 +858,24 @@
return true;
}
- BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+ value_type
+ fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type tmp = load(memory_order_relaxed);
+ do {} while(!compare_exchange_weak(tmp, (char*)tmp + v, order, memory_order_relaxed));
+ return tmp;
+ }
+
+ value_type
+ fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type tmp = load(memory_order_relaxed);
+ do {} while(!compare_exchange_weak(tmp, (char*)tmp - v, order, memory_order_relaxed));
+ return tmp;
+ }
+
+ BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
+
private:
base_atomic(const base_atomic &) /* = delete */ ;
void operator=(const base_atomic &) /* = delete */ ;
Modified: branches/release/boost/atomic/detail/gcc-x86.hpp
==============================================================================
--- branches/release/boost/atomic/detail/gcc-x86.hpp (original)
+++ branches/release/boost/atomic/detail/gcc-x86.hpp 2013-05-21 05:54:09 EDT (Tue, 21 May 2013)
@@ -28,6 +28,15 @@
#define BOOST_ATOMIC_X86_PAUSE() __asm__ __volatile__ ("pause\n")
+#if defined(__i386__) &&\
+ (\
+ defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8) ||\
+ defined(__i586__) || defined(__i686__) || defined(__pentium4__) || defined(__nocona__) || defined(__core2__) || defined(__corei7__) ||\
+ defined(__k6__) || defined(__athlon__) || defined(__k8__) || defined(__amdfam10__) || defined(__bdver1__) || defined(__bdver2__) || defined(__bdver3__) || defined(__btver1__) || defined(__btver2__)\
+ )
+#define BOOST_ATOMIC_X86_HAS_CMPXCHG8B 1
+#endif
+
inline void
platform_fence_before(memory_order order)
{
@@ -198,10 +207,10 @@
#define BOOST_ATOMIC_INT_LOCK_FREE 2
#define BOOST_ATOMIC_LONG_LOCK_FREE 2
-#if defined(__x86_64__)
+#if defined(__x86_64__) || defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B)
#define BOOST_ATOMIC_LLONG_LOCK_FREE 2
#else
-#define BOOST_ATOMIC_LLONG_LOCK_FREE 1
+#define BOOST_ATOMIC_LLONG_LOCK_FREE 0
#endif
#define BOOST_ATOMIC_POINTER_LOCK_FREE 2
@@ -808,6 +817,7 @@
class base_atomic<void *, void *, 4, Sign>
{
typedef base_atomic this_type;
+ typedef ptrdiff_t difference_type;
typedef void * value_type;
public:
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
@@ -875,7 +885,25 @@
return true;
}
- BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+ value_type
+ fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ platform_fence_before(order);
+ __asm__ (
+ "lock ; xaddl %0, %1"
+ : "+r" (v), "+m" (v_)
+ );
+ platform_fence_after(order);
+ return reinterpret_cast<value_type>(v);
+ }
+
+ value_type
+ fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return fetch_add(-v, order);
+ }
+
+ BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
private:
base_atomic(const base_atomic &) /* = delete */ ;
void operator=(const base_atomic &) /* = delete */ ;
@@ -994,6 +1022,7 @@
class base_atomic<void *, void *, 8, Sign>
{
typedef base_atomic this_type;
+ typedef ptrdiff_t difference_type;
typedef void * value_type;
public:
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
@@ -1061,7 +1090,25 @@
return true;
}
- BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+ value_type
+ fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ platform_fence_before(order);
+ __asm__ (
+ "lock ; xaddq %0, %1"
+ : "+r" (v), "+m" (v_)
+ );
+ platform_fence_after(order);
+ return reinterpret_cast<value_type>(v);
+ }
+
+ value_type
+ fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return fetch_add(-v, order);
+ }
+
+ BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
private:
base_atomic(const base_atomic &) /* = delete */ ;
void operator=(const base_atomic &) /* = delete */ ;
@@ -1580,11 +1627,11 @@
};
#endif
-#if !defined(__x86_64__) && (defined(__i686__) || defined (__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8))
+#if !defined(__x86_64__) && defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B)
template<typename T>
inline bool
-platform_cmpxchg64_strong(T & expected, T desired, volatile T * ptr)
+platform_cmpxchg64_strong(T & expected, T desired, volatile T * ptr) BOOST_NOEXCEPT
{
#ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_8
const T oldval = __sync_val_compare_and_swap(ptr, expected, desired);
@@ -1592,7 +1639,7 @@
expected = oldval;
return result;
#else
- int scratch;
+ uint32_t scratch;
T prev = expected;
/* Make sure ebx is saved and restored properly in case
this object is compiled as "position independent". Since
@@ -1614,7 +1661,7 @@
"lock; cmpxchg8b 0(%4)\n"
"movl %1, %%ebx\n"
: "=A" (prev), "=m" (scratch)
- : "D" ((int)desired), "c" ((int)(desired >> 32)), "S" (ptr), "0" (prev)
+ : "D" ((uint32_t)desired), "c" ((uint32_t)(desired >> 32)), "S" (ptr), "0" (prev)
: "memory");
bool success = (prev == expected);
expected = prev;
@@ -1622,14 +1669,47 @@
#endif
}
+// Intel 64 and IA-32 Architectures Software Developer's Manual, Volume 3A, 8.1.1. Guaranteed Atomic Operations:
+//
+// The Pentium processor (and newer processors since) guarantees that the following additional memory operations will always be carried out atomically:
+// * Reading or writing a quadword aligned on a 64-bit boundary
+//
+// Luckily, the memory is almost always 8-byte aligned in our case because atomic<> uses 64-bit native types for storage and dynamic memory allocations
+// have at least 8-byte alignment. The only unfortunate case is when the atomic is placed on the stack and is not 8-byte aligned (like on 32-bit Windows).
+
template<typename T>
inline void
-platform_store64(T value, volatile T * ptr)
+platform_store64(T value, volatile T * ptr) BOOST_NOEXCEPT
{
- T expected = *ptr;
- for (; !platform_cmpxchg64_strong(expected, value, ptr);)
+ if (((uint32_t)ptr & 0x00000007) == 0)
{
- BOOST_ATOMIC_X86_PAUSE();
+#if defined(__SSE2__)
+ __asm__ __volatile__
+ (
+ "movq %1, %%xmm0\n\t"
+ "movq %%xmm0, %0\n\t"
+ : "=m" (*ptr)
+ : "m" (value)
+ : "memory", "xmm0"
+ );
+#else
+ __asm__ __volatile__
+ (
+ "fildll %1\n\t"
+ "fistpll %0\n\t"
+ : "=m" (*ptr)
+ : "m" (value)
+ : "memory"
+ );
+#endif
+ }
+ else
+ {
+ T expected = *ptr;
+ while (!platform_cmpxchg64_strong(expected, value, ptr))
+ {
+ BOOST_ATOMIC_X86_PAUSE();
+ }
}
}
@@ -1637,12 +1717,37 @@
inline T
platform_load64(const volatile T * ptr) BOOST_NOEXCEPT
{
- T expected = *ptr;
- for (; !platform_cmpxchg64_strong(expected, expected, const_cast<volatile T*>(ptr));)
+ T value = T();
+
+ if (((uint32_t)ptr & 0x00000007) == 0)
{
- BOOST_ATOMIC_X86_PAUSE();
+#if defined(__SSE2__)
+ __asm__ __volatile__
+ (
+ "movq %1, %%xmm0\n\t"
+ "movq %%xmm0, %0\n\t"
+ : "=m" (value)
+ : "m" (*ptr)
+ : "memory", "xmm0"
+ );
+#else
+ __asm__ __volatile__
+ (
+ "fildll %1\n\t"
+ "fistpll %0\n\t"
+ : "=m" (value)
+ : "m" (*ptr)
+ : "memory"
+ );
+#endif
}
- return expected;
+ else
+ {
+ // We don't care about the comparison result here; the previous value will be stored into value anyway.
+ platform_cmpxchg64_strong(value, value, const_cast<volatile T*>(ptr));
+ }
+
+ return value;
}
#endif
@@ -1652,7 +1757,7 @@
}
/* pull in 64-bit atomic type using cmpxchg8b above */
-#if !defined(__x86_64__) && (defined(__i686__) || defined (__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8))
+#if !defined(__x86_64__) && defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B)
#include <boost/atomic/detail/cas64strong.hpp>
#endif
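
The gcc-x86.hpp hunks above encode a simple rule for 64-bit atomics on 32-bit x86: when the 8-byte object is naturally aligned, a single wide load/store (SSE2 movq, or fildll/fistpll without SSE2) is atomic per the Intel SDM guarantee quoted in the diff, and only a misaligned object needs the cmpxchg8b loop. A compacted sketch of that dispatch, using the GCC __sync builtin as a stand-in for the hand-written cmpxchg8b (illustrative, 32-bit x86 with cmpxchg8b support assumed):

#include <stdint.h>

typedef long long T64;

static inline bool is_8byte_aligned(const volatile void* p)
{
    // Same test as the patch: the low three address bits must be zero.
    return ((uintptr_t)p & 0x7u) == 0;
}

static void store64(T64 value, volatile T64* ptr)
{
    if (is_8byte_aligned(ptr))
    {
        // Aligned path: one 64-bit store; the patch emits movq/fistpll here.
        // (Plain assignment is only a placeholder for that inline asm.)
        *ptr = value;
    }
    else
    {
        // Misaligned path: retry a strong 64-bit CAS until it succeeds.
        T64 expected = *ptr;
        while (!__sync_bool_compare_and_swap(ptr, expected, value))
            expected = *ptr;
    }
}
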
Modified: branches/release/boost/atomic/detail/lockpool.hpp
==============================================================================
--- branches/release/boost/atomic/detail/lockpool.hpp (original)
+++ branches/release/boost/atomic/detail/lockpool.hpp 2013-05-21 05:54:09 EDT (Tue, 21 May 2013)
@@ -61,6 +61,7 @@
{
private:
atomic_flag& flag_;
+ uint8_t padding[128 - sizeof(atomic_flag)];
scoped_lock(const scoped_lock &) /* = delete */;
scoped_lock& operator=(const scoped_lock &) /* = delete */;
Modified: branches/release/boost/atomic/detail/windows.hpp
==============================================================================
--- branches/release/boost/atomic/detail/windows.hpp (original)
+++ branches/release/boost/atomic/detail/windows.hpp 2013-05-21 05:54:09 EDT (Tue, 21 May 2013)
@@ -878,6 +878,7 @@
class base_atomic<void*, void*, sizeof_pointer, Sign>
{
typedef base_atomic this_type;
+ typedef ptrdiff_t difference_type;
typedef void* value_type;
public:
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT: v_(v) {}
@@ -938,7 +939,22 @@
return true;
}
- BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+ value_type
+ fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ platform_fence_before(order);
+ value_type res = (value_type)BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_POINTER(&v_, v);
+ platform_fence_after(order);
+ return res;
+ }
+
+ value_type
+ fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return fetch_add(-v, order);
+ }
+
+ BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
private:
base_atomic(const base_atomic &) /* = delete */ ;
void operator=(const base_atomic &) /* = delete */ ;
Modified: branches/release/libs/atomic/test/api_test_helpers.hpp
==============================================================================
--- branches/release/libs/atomic/test/api_test_helpers.hpp (original)
+++ branches/release/libs/atomic/test/api_test_helpers.hpp 2013-05-21 05:54:09 EDT (Tue, 21 May 2013)
@@ -111,8 +111,8 @@
#endif
}
-template<typename T, typename D>
-void test_additive_operators(T value, D delta)
+template<typename T, typename D, typename AddType>
+void test_additive_operators_with_type(T value, D delta)
{
/* note: the tests explicitly cast the result of any addition
to the type to be tested to force truncation of the result to
@@ -122,14 +122,14 @@
{
boost::atomic<T> a(value);
T n = a.fetch_add(delta);
- BOOST_CHECK( a.load() == T(value + delta) );
+ BOOST_CHECK( a.load() == T((AddType)value + delta) );
BOOST_CHECK( n == value );
}
{
boost::atomic<T> a(value);
T n = a.fetch_sub(delta);
- BOOST_CHECK( a.load() == T(value - delta) );
+ BOOST_CHECK( a.load() == T((AddType)value - delta) );
BOOST_CHECK( n == value );
}
@@ -137,47 +137,53 @@
{
boost::atomic<T> a(value);
T n = (a += delta);
- BOOST_CHECK( a.load() == T(value + delta) );
- BOOST_CHECK( n == T(value + delta) );
+ BOOST_CHECK( a.load() == T((AddType)value + delta) );
+ BOOST_CHECK( n == T((AddType)value + delta) );
}
{
boost::atomic<T> a(value);
T n = (a -= delta);
- BOOST_CHECK( a.load() == T(value - delta) );
- BOOST_CHECK( n == T(value - delta) );
+ BOOST_CHECK( a.load() == T((AddType)value - delta) );
+ BOOST_CHECK( n == T((AddType)value - delta) );
}
/* overloaded increment/decrement */
{
boost::atomic<T> a(value);
T n = a++;
- BOOST_CHECK( a.load() == T(value + 1) );
+ BOOST_CHECK( a.load() == T((AddType)value + 1) );
BOOST_CHECK( n == value );
}
{
boost::atomic<T> a(value);
T n = ++a;
- BOOST_CHECK( a.load() == T(value + 1) );
- BOOST_CHECK( n == T(value + 1) );
+ BOOST_CHECK( a.load() == T((AddType)value + 1) );
+ BOOST_CHECK( n == T((AddType)value + 1) );
}
{
boost::atomic<T> a(value);
T n = a--;
- BOOST_CHECK( a.load() == T(value - 1) );
+ BOOST_CHECK( a.load() == T((AddType)value - 1) );
BOOST_CHECK( n == value );
}
{
boost::atomic<T> a(value);
T n = --a;
- BOOST_CHECK( a.load() == T(value - 1) );
- BOOST_CHECK( n == T(value - 1) );
+ BOOST_CHECK( a.load() == T((AddType)value - 1) );
+ BOOST_CHECK( n == T((AddType)value - 1) );
}
}
+template<typename T, typename D>
+void test_additive_operators(T value, D delta)
+{
+ test_additive_operators_with_type<T, D, T>(value, delta);
+}
+
template<typename T>
void test_additive_wrap(T value)
{
@@ -275,6 +281,7 @@
test_additive_operators<T*>(&values[1], 1);
test_base_operators<void*>(&values[0], &values[1], &values[2]);
+ test_additive_operators_with_type<void*, int, char*>(&values[1], 1);
boost::atomic<void *> ptr;
boost::atomic<intptr_t> integral;
Modified: branches/release/libs/atomic/test/lockfree.cpp
==============================================================================
--- branches/release/libs/atomic/test/lockfree.cpp (original)
+++ branches/release/libs/atomic/test/lockfree.cpp 2013-05-21 05:54:09 EDT (Tue, 21 May 2013)
@@ -43,7 +43,11 @@
#define EXPECT_SHORT_LOCK_FREE 2
#define EXPECT_INT_LOCK_FREE 2
#define EXPECT_LONG_LOCK_FREE 2
-#define EXPECT_LLONG_LOCK_FREE 1
+#if defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B)
+#define EXPECT_LLONG_LOCK_FREE 2
+#else
+#define EXPECT_LLONG_LOCK_FREE 0
+#endif
#define EXPECT_POINTER_LOCK_FREE 2
#define EXPECT_BOOL_LOCK_FREE 2
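
The lockfree.cpp expectations mirror the header change: on 32-bit x86, BOOST_ATOMIC_LLONG_LOCK_FREE is now 2 (always lock-free) when cmpxchg8b is available and 0 (lock-pool based) otherwise, instead of the previous value of 1. A quick way to check what a given build ends up with (illustrative):

#include <boost/atomic.hpp>
#include <iostream>

int main()
{
    // 2 = always lock-free, 1 = sometimes, 0 = never (falls back to the lock pool).
    std::cout << "BOOST_ATOMIC_LLONG_LOCK_FREE = "
              << BOOST_ATOMIC_LLONG_LOCK_FREE << '\n';

    boost::atomic<long long> x(0);
    std::cout << "atomic<long long>::is_lock_free() = "
              << x.is_lock_free() << '\n';
    return 0;
}
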