Boost-Commit :
Subject: [Boost-commit] svn:boost r85092 - in branches/release: boost/atomic boost/atomic/detail libs/atomic libs/atomic/doc libs/atomic/src libs/atomic/test
From: andrey.semashev_at_[hidden]
Date: 2013-07-20 14:01:35
Author: andysem
Date: 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013)
New Revision: 85092
URL: http://svn.boost.org/trac/boost/changeset/85092
Log:
Merged recent changes from trunk.
Added:
branches/release/boost/atomic/detail/cas128strong.hpp
- copied unchanged from r85090, trunk/boost/atomic/detail/cas128strong.hpp
branches/release/boost/atomic/detail/cas64strong-ptr.hpp
- copied unchanged from r85090, trunk/boost/atomic/detail/cas64strong-ptr.hpp
branches/release/boost/atomic/detail/gcc-atomic.hpp
- copied unchanged from r85090, trunk/boost/atomic/detail/gcc-atomic.hpp
Properties modified:
branches/release/boost/atomic/ (props changed)
branches/release/libs/atomic/ (props changed)
Text files modified:
branches/release/boost/atomic/atomic.hpp | 53 -
branches/release/boost/atomic/detail/base.hpp | 70 +
branches/release/boost/atomic/detail/cas128strong.hpp | 286 +++++++++
branches/release/boost/atomic/detail/cas32strong.hpp | 114 ++
branches/release/boost/atomic/detail/cas32weak.hpp | 162 +++-
branches/release/boost/atomic/detail/cas64strong-ptr.hpp | 247 ++++++++
branches/release/boost/atomic/detail/cas64strong.hpp | 223 ------
branches/release/boost/atomic/detail/config.hpp | 6
branches/release/boost/atomic/detail/gcc-alpha.hpp | 2
branches/release/boost/atomic/detail/gcc-armv6plus.hpp | 4
branches/release/boost/atomic/detail/gcc-atomic.hpp | 1204 ++++++++++++++++++++++++++++++++++++++++
branches/release/boost/atomic/detail/gcc-cas.hpp | 67 +-
branches/release/boost/atomic/detail/gcc-ppc.hpp | 280 ++++++--
branches/release/boost/atomic/detail/gcc-sparcv9.hpp | 246 +++++--
branches/release/boost/atomic/detail/gcc-x86.hpp | 573 +++++++++++++-----
branches/release/boost/atomic/detail/generic-cas.hpp | 2
branches/release/boost/atomic/detail/interlocked.hpp | 9
branches/release/boost/atomic/detail/linux-arm.hpp | 3
branches/release/boost/atomic/detail/lockpool.hpp | 15
branches/release/boost/atomic/detail/platform.hpp | 10
branches/release/boost/atomic/detail/type-classification.hpp | 4
branches/release/boost/atomic/detail/windows.hpp | 572 ++++++++++++------
branches/release/libs/atomic/doc/atomic.qbk | 40 +
branches/release/libs/atomic/src/lockpool.cpp | 35 +
branches/release/libs/atomic/test/api_test_helpers.hpp | 4
branches/release/libs/atomic/test/lockfree.cpp | 34
branches/release/libs/atomic/test/native_api.cpp | 8
27 files changed, 3343 insertions(+), 930 deletions(-)
Modified: branches/release/boost/atomic/atomic.hpp
==============================================================================
--- branches/release/boost/atomic/atomic.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/atomic.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -22,7 +22,7 @@
#include <boost/mpl/and.hpp>
#endif
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
@@ -60,6 +60,10 @@
#define BOOST_ATOMIC_LLONG_LOCK_FREE 0
#endif
+#ifndef BOOST_ATOMIC_INT128_LOCK_FREE
+#define BOOST_ATOMIC_INT128_LOCK_FREE 0
+#endif
+
#ifndef BOOST_ATOMIC_POINTER_LOCK_FREE
#define BOOST_ATOMIC_POINTER_LOCK_FREE 0
#endif
@@ -112,11 +116,18 @@
mpl::and_< boost::is_integral<T>, boost::is_signed<T> >::value
#endif
> super;
+ typedef typename super::value_arg_type value_arg_type;
+
public:
- atomic(void) BOOST_NOEXCEPT : super() {}
- BOOST_CONSTEXPR atomic(value_type v) BOOST_NOEXCEPT : super(v) {}
+ BOOST_DEFAULTED_FUNCTION(atomic(void), BOOST_NOEXCEPT {})
+
+ // NOTE: The constructor is made explicit because gcc 4.7 complains that
+ // operator=(value_arg_type) is considered ambiguous with operator=(atomic const&)
+ // in assignment expressions, even though conversion to atomic<> is less preferred
+ // than conversion to value_arg_type.
+ explicit BOOST_CONSTEXPR atomic(value_arg_type v) BOOST_NOEXCEPT : super(v) {}
- value_type operator=(value_type v) volatile BOOST_NOEXCEPT
+ value_type operator=(value_arg_type v) volatile BOOST_NOEXCEPT
{
this->store(v);
return v;
@@ -127,14 +138,8 @@
return this->load();
}
-#ifdef BOOST_NO_CXX11_DELETED_FUNCTIONS
-private:
- atomic(const atomic &) /* =delete */ ;
- atomic & operator=(const atomic &) volatile /* =delete */ ;
-#else
- atomic(const atomic &) = delete;
- atomic & operator=(const atomic &) volatile = delete;
-#endif
+ BOOST_DELETED_FUNCTION(atomic(atomic const&))
+ BOOST_DELETED_FUNCTION(atomic& operator=(atomic const&) volatile)
};
typedef atomic<char> atomic_char;
@@ -190,25 +195,9 @@
typedef atomic<std::size_t> atomic_size_t;
typedef atomic<std::ptrdiff_t> atomic_ptrdiff_t;
-// PGI seems to not support intptr_t/uintptr_t properly. BOOST_HAS_STDINT_H is not defined for this compiler by Boost.Config.
-#if !defined(__PGIC__)
-
-#if (defined(BOOST_WINDOWS) && !defined(_WIN32_WCE)) \
- || (defined(_XOPEN_UNIX) && (_XOPEN_UNIX+0 > 0) && !defined(__UCLIBC__)) \
- || defined(__CYGWIN__) \
- || defined(macintosh) || defined(__APPLE__) || defined(__APPLE_CC__) \
- || defined(__FreeBSD__) || defined(__NetBSD__) || defined(__OpenBSD__) || defined(__DragonFly__)
+#if defined(BOOST_HAS_INTPTR_T)
typedef atomic<intptr_t> atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
-#elif defined(__GNUC__) || defined(__clang__)
-#if defined(__INTPTR_TYPE__)
-typedef atomic< __INTPTR_TYPE__ > atomic_intptr_t;
-#endif
-#if defined(__UINTPTR_TYPE__)
-typedef atomic< __UINTPTR_TYPE__ > atomic_uintptr_t;
-#endif
-#endif
-
#endif
#ifndef BOOST_ATOMIC_FLAG_LOCK_FREE
@@ -229,9 +218,11 @@
{
v_.store(false, order);
}
+
+ BOOST_DELETED_FUNCTION(atomic_flag(atomic_flag const&))
+ BOOST_DELETED_FUNCTION(atomic_flag& operator=(atomic_flag const&))
+
private:
- atomic_flag(const atomic_flag &) /* = delete */ ;
- atomic_flag & operator=(const atomic_flag &) /* = delete */ ;
atomic<bool> v_;
};
#endif
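
The NOTE added above explains why the value constructor of atomic<> is now explicit: gcc 4.7 treated operator=(value_arg_type) as ambiguous with operator=(atomic const&) in plain assignment expressions, even though the conversion to atomic<> should be the less preferred path. A minimal stand-alone reduction of that situation (hypothetical names, not part of this commit):

// Both assignment operators are viable for "a = 1" while the converting
// constructor is implicit; gcc 4.7 reportedly diagnosed the call as
// ambiguous, although operator=(int) should win.  Making the constructor
// explicit removes the int -> my_atomic conversion and with it the
// operator=(my_atomic const&) candidate.
struct my_atomic
{
    my_atomic() : v_(0) {}
    /* explicit */ my_atomic(int v) : v_(v) {}        // the commit adds explicit here

    int operator=(int v) volatile { v_ = v; return v; }
    my_atomic& operator=(my_atomic const&) volatile;  // deleted in the real class

    int v_;
};

int main()
{
    my_atomic a;
    a = 1;   // the assignment expression gcc 4.7 complained about
    return 0;
}
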
Modified: branches/release/boost/atomic/detail/base.hpp
==============================================================================
--- branches/release/boost/atomic/detail/base.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/base.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -19,7 +19,7 @@
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/lockpool.hpp>
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
@@ -155,13 +155,14 @@
inline memory_order
calculate_failure_order(memory_order order)
{
- switch(order) {
- case memory_order_acq_rel:
- return memory_order_acquire;
- case memory_order_release:
- return memory_order_relaxed;
- default:
- return order;
+ switch(order)
+ {
+ case memory_order_acq_rel:
+ return memory_order_acquire;
+ case memory_order_release:
+ return memory_order_relaxed;
+ default:
+ return order;
}
}
@@ -172,11 +173,12 @@
typedef base_atomic this_type;
typedef T value_type;
typedef lockpool::scoped_lock guard_type;
- typedef char storage_type[sizeof(value_type)];
-public:
- base_atomic(void) {}
+protected:
+ typedef value_type const& value_arg_type;
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(v)
{}
@@ -249,15 +251,16 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
char * storage_ptr() volatile const BOOST_NOEXCEPT
{
return const_cast<char *>(&reinterpret_cast<char const volatile &>(v_));
}
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
-
T v_;
};
@@ -269,9 +272,13 @@
typedef T value_type;
typedef T difference_type;
typedef lockpool::scoped_lock guard_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order /*order*/ = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -381,9 +388,11 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -393,11 +402,15 @@
private:
typedef base_atomic this_type;
typedef T * value_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
typedef lockpool::scoped_lock guard_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order /*order*/ = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -474,9 +487,11 @@
}
BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -485,12 +500,16 @@
{
private:
typedef base_atomic this_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
typedef void * value_type;
typedef lockpool::scoped_lock guard_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order /*order*/ = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -572,9 +591,10 @@
BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
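
calculate_failure_order(), reindented in the hunk above, derives the failure ordering that the single-order compare_exchange overloads pass on to the two-order implementation. A stand-alone restatement of the same mapping (illustrative only, hypothetical function name):

#include <boost/memory_order.hpp>
#include <cassert>

// The failure case of compare_exchange is a pure load, so it may not carry
// a release component and may not be stronger than the success ordering.
inline boost::memory_order failure_order_for(boost::memory_order success)
{
    switch (success)
    {
    case boost::memory_order_acq_rel:
        return boost::memory_order_acquire;   // drop the release half
    case boost::memory_order_release:
        return boost::memory_order_relaxed;   // nothing to acquire or release
    default:
        return success;                       // relaxed/consume/acquire/seq_cst pass through
    }
}

int main()
{
    assert(failure_order_for(boost::memory_order_acq_rel) == boost::memory_order_acquire);
    assert(failure_order_for(boost::memory_order_release) == boost::memory_order_relaxed);
    assert(failure_order_for(boost::memory_order_seq_cst) == boost::memory_order_seq_cst);
    return 0;
}
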
Copied: branches/release/boost/atomic/detail/cas128strong.hpp (from r85090, trunk/boost/atomic/detail/cas128strong.hpp)
==============================================================================
--- /dev/null 00:00:00 1970 (empty, because file is newly added)
+++ branches/release/boost/atomic/detail/cas128strong.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092, copy of r85090, trunk/boost/atomic/detail/cas128strong.hpp)
@@ -0,0 +1,286 @@
+#ifndef BOOST_ATOMIC_DETAIL_CAS128STRONG_HPP
+#define BOOST_ATOMIC_DETAIL_CAS128STRONG_HPP
+
+// Distributed under the Boost Software License, Version 1.0.
+// See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// Copyright (c) 2011 Helge Bahmann
+// Copyright (c) 2013 Tim Blechmann, Andrey Semashev
+
+// Build 128-bit atomic operation on integers/UDTs from platform_cmpxchg128_strong
+// primitive. It is assumed that 128-bit loads/stores are not
+// atomic, so they are implemented through platform_load128/platform_store128.
+
+#include <string.h>
+#include <cstddef>
+#include <boost/cstdint.hpp>
+#include <boost/memory_order.hpp>
+#include <boost/atomic/detail/config.hpp>
+#include <boost/atomic/detail/base.hpp>
+
+#ifdef BOOST_HAS_PRAGMA_ONCE
+#pragma once
+#endif
+
+namespace boost {
+namespace atomics {
+namespace detail {
+
+/* integral types */
+
+template<typename T, bool Sign>
+class base_atomic<T, int, 16, Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef T value_type;
+ typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
+
+ void
+ store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ platform_fence_before_store(order);
+ platform_store128(v, &v_);
+ platform_fence_after_store(order);
+ }
+
+ value_type
+ load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ value_type v = platform_load128(&v_);
+ platform_fence_after_load(order);
+ return v;
+ }
+
+ value_type
+ exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original = load(memory_order_relaxed);
+ do {
+ } while (!compare_exchange_weak(original, v, order, memory_order_relaxed));
+ return original;
+ }
+
+ bool
+ compare_exchange_weak(
+ value_type & expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return compare_exchange_strong(expected, desired, success_order, failure_order);
+ }
+
+ bool
+ compare_exchange_strong(
+ value_type & expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ platform_fence_before(success_order);
+
+ bool success = platform_cmpxchg128_strong(expected, desired, &v_);
+
+ if (success) {
+ platform_fence_after(success_order);
+ } else {
+ platform_fence_after(failure_order);
+ }
+
+ return success;
+ }
+
+ value_type
+ fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original = load(memory_order_relaxed);
+ do {
+ } while (!compare_exchange_weak(original, original + v, order, memory_order_relaxed));
+ return original;
+ }
+
+ value_type
+ fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original = load(memory_order_relaxed);
+ do {
+ } while (!compare_exchange_weak(original, original - v, order, memory_order_relaxed));
+ return original;
+ }
+
+ value_type
+ fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original = load(memory_order_relaxed);
+ do {
+ } while (!compare_exchange_weak(original, original & v, order, memory_order_relaxed));
+ return original;
+ }
+
+ value_type
+ fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original = load(memory_order_relaxed);
+ do {
+ } while (!compare_exchange_weak(original, original | v, order, memory_order_relaxed));
+ return original;
+ }
+
+ value_type
+ fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original = load(memory_order_relaxed);
+ do {
+ } while (!compare_exchange_weak(original, original ^ v, order, memory_order_relaxed));
+ return original;
+ }
+
+ bool
+ is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return true;
+ }
+
+ BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ value_type v_;
+};
+
+/* generic types */
+
+#if defined(BOOST_HAS_INT128)
+
+typedef boost::uint128_type storage128_type;
+
+#else // defined(BOOST_HAS_INT128)
+
+struct BOOST_ALIGNMENT(16) storage128_type
+{
+ uint64_t data[2];
+};
+
+inline bool operator== (storage128_type const& left, storage128_type const& right)
+{
+ return left.data[0] == right.data[0] && left.data[1] == right.data[1];
+}
+inline bool operator!= (storage128_type const& left, storage128_type const& right)
+{
+ return !(left == right);
+}
+
+#endif // defined(BOOST_HAS_INT128)
+
+template<typename T, bool Sign>
+class base_atomic<T, void, 16, Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef T value_type;
+ typedef storage128_type storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
+ {
+ memcpy(&v_, &v, sizeof(value_type));
+ }
+
+ void
+ store(value_type const& value, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ storage_type value_s = 0;
+ memcpy(&value_s, &value, sizeof(value_type));
+ platform_fence_before_store(order);
+ platform_store128(value_s, &v_);
+ platform_fence_after_store(order);
+ }
+
+ value_type
+ load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ storage_type value_s = platform_load128(&v_);
+ platform_fence_after_load(order);
+ value_type value;
+ memcpy(&value, &value_s, sizeof(value_type));
+ return value;
+ }
+
+ value_type
+ exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original = load(memory_order_relaxed);
+ do {
+ } while (!compare_exchange_weak(original, v, order, memory_order_relaxed));
+ return original;
+ }
+
+ bool
+ compare_exchange_weak(
+ value_type & expected,
+ value_type const& desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return compare_exchange_strong(expected, desired, success_order, failure_order);
+ }
+
+ bool
+ compare_exchange_strong(
+ value_type & expected,
+ value_type const& desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ storage_type expected_s = 0, desired_s = 0;
+ memcpy(&expected_s, &expected, sizeof(value_type));
+ memcpy(&desired_s, &desired, sizeof(value_type));
+
+ platform_fence_before(success_order);
+ bool success = platform_cmpxchg128_strong(expected_s, desired_s, &v_);
+
+ if (success) {
+ platform_fence_after(success_order);
+ } else {
+ platform_fence_after(failure_order);
+ memcpy(&expected, &expected_s, sizeof(value_type));
+ }
+
+ return success;
+ }
+
+ bool
+ is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return true;
+ }
+
+ BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ storage_type v_;
+};
+
+}
+}
+}
+
+#endif
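
The new cas128strong.hpp backend builds 16-byte atomics for integers and UDTs on top of platform_cmpxchg128_strong, and atomic.hpp now exposes BOOST_ATOMIC_INT128_LOCK_FREE for it. A usage sketch with a hypothetical 16-byte payload; whether is_lock_free() actually reports true depends on the target and build flags (e.g. x86-64 with cmpxchg16b available):

#include <boost/atomic.hpp>
#include <boost/cstdint.hpp>
#include <iostream>

// A 16-byte trivially-copyable payload.  On targets where a 128-bit CAS is
// available the whole struct can be updated without the lock pool.
struct position
{
    boost::uint64_t offset;
    boost::uint64_t length;
};

int main()
{
    boost::atomic<position> pos;
    pos.store(position());                            // whole-struct store

    position expected = pos.load();
    position desired = expected;
    desired.length = 42;
    pos.compare_exchange_strong(expected, desired);   // 128-bit CAS where supported

    std::cout << "lock-free: " << pos.is_lock_free()
              << ", BOOST_ATOMIC_INT128_LOCK_FREE = "
              << BOOST_ATOMIC_INT128_LOCK_FREE << "\n";
    return 0;
}
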
Modified: branches/release/boost/atomic/detail/cas32strong.hpp
==============================================================================
--- branches/release/boost/atomic/detail/cas32strong.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/cas32strong.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -12,13 +12,14 @@
// Build 8-, 16- and 32-bit atomic operations from
// a platform_cmpxchg32_strong primitive.
+#include <string.h>
#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/base.hpp>
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
@@ -31,13 +32,18 @@
template<typename T, bool Sign>
class base_atomic<T, int, 1, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -150,22 +156,29 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, int, 2, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -278,21 +291,28 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, int, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -401,9 +421,11 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -412,12 +434,17 @@
template<bool Sign>
class base_atomic<void *, void *, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef void * value_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -499,21 +526,28 @@
}
BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
template<typename T, bool Sign>
class base_atomic<T *, void *, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T * value_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -595,9 +629,11 @@
}
BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -606,17 +642,21 @@
template<typename T, bool Sign>
class base_atomic<T, void, 1, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
-
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
@@ -688,26 +728,32 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, void, 2, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
-
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
@@ -780,26 +826,32 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, void, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
-
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
@@ -872,9 +924,11 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
Modified: branches/release/boost/atomic/detail/cas32weak.hpp
==============================================================================
--- branches/release/boost/atomic/detail/cas32weak.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/cas32weak.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -9,13 +9,14 @@
// Copyright (c) 2013 Tim Blechmann
+#include <string.h>
#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/base.hpp>
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
@@ -28,13 +29,18 @@
template<typename T, bool Sign>
class base_atomic<T, int, 1, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -92,11 +98,13 @@
memory_order success_order,
memory_order failure_order) volatile BOOST_NOEXCEPT
{
- for(;;) {
+ while (true)
+ {
value_type tmp = expected;
if (compare_exchange_weak(tmp, desired, success_order, failure_order))
return true;
- if (tmp != expected) {
+ if (tmp != expected)
+ {
expected = tmp;
return false;
}
@@ -155,22 +163,29 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, int, 2, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -228,11 +243,13 @@
memory_order success_order,
memory_order failure_order) volatile BOOST_NOEXCEPT
{
- for(;;) {
+ while (true)
+ {
value_type tmp = expected;
if (compare_exchange_weak(tmp, desired, success_order, failure_order))
return true;
- if (tmp != expected) {
+ if (tmp != expected)
+ {
expected = tmp;
return false;
}
@@ -291,21 +308,28 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, int, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -359,11 +383,13 @@
memory_order success_order,
memory_order failure_order) volatile BOOST_NOEXCEPT
{
- for(;;) {
+ while (true)
+ {
value_type tmp = expected;
if (compare_exchange_weak(tmp, desired, success_order, failure_order))
return true;
- if (tmp != expected) {
+ if (tmp != expected)
+ {
expected = tmp;
return false;
}
@@ -422,9 +448,11 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -433,12 +461,17 @@
template<bool Sign>
class base_atomic<void *, void *, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef void * value_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -492,11 +525,13 @@
memory_order success_order,
memory_order failure_order) volatile BOOST_NOEXCEPT
{
- for(;;) {
+ while (true)
+ {
value_type tmp = expected;
if (compare_exchange_weak(tmp, desired, success_order, failure_order))
return true;
- if (tmp != expected) {
+ if (tmp != expected)
+ {
expected = tmp;
return false;
}
@@ -528,21 +563,28 @@
}
BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
template<typename T, bool Sign>
class base_atomic<T *, void *, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T * value_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -596,11 +638,13 @@
memory_order success_order,
memory_order failure_order) volatile BOOST_NOEXCEPT
{
- for(;;) {
+ while (true)
+ {
value_type tmp = expected;
if (compare_exchange_weak(tmp, desired, success_order, failure_order))
return true;
- if (tmp != expected) {
+ if (tmp != expected)
+ {
expected = tmp;
return false;
}
@@ -632,9 +676,11 @@
}
BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -643,17 +689,21 @@
template<typename T, bool Sign>
class base_atomic<T, void, 1, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
-
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
@@ -716,11 +766,13 @@
memory_order success_order,
memory_order failure_order) volatile BOOST_NOEXCEPT
{
- for(;;) {
+ while (true)
+ {
value_type tmp = expected;
if (compare_exchange_weak(tmp, desired, success_order, failure_order))
return true;
- if (tmp != expected) {
+ if (tmp != expected)
+ {
expected = tmp;
return false;
}
@@ -734,26 +786,32 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, void, 2, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
-
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
@@ -816,11 +874,13 @@
memory_order success_order,
memory_order failure_order) volatile BOOST_NOEXCEPT
{
- for(;;) {
+ while (true)
+ {
value_type tmp = expected;
if (compare_exchange_weak(tmp, desired, success_order, failure_order))
return true;
- if (tmp != expected) {
+ if (tmp != expected)
+ {
expected = tmp;
return false;
}
@@ -834,26 +894,32 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, void, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
-
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
@@ -916,11 +982,13 @@
memory_order success_order,
memory_order failure_order) volatile BOOST_NOEXCEPT
{
- for(;;) {
+ while (true)
+ {
value_type tmp = expected;
if (compare_exchange_weak(tmp, desired, success_order, failure_order))
return true;
- if (tmp != expected) {
+ if (tmp != expected)
+ {
expected = tmp;
return false;
}
@@ -934,9 +1002,11 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
Copied: branches/release/boost/atomic/detail/cas64strong-ptr.hpp (from r85090, trunk/boost/atomic/detail/cas64strong-ptr.hpp)
==============================================================================
--- /dev/null 00:00:00 1970 (empty, because file is newly added)
+++ branches/release/boost/atomic/detail/cas64strong-ptr.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092, copy of r85090, trunk/boost/atomic/detail/cas64strong-ptr.hpp)
@@ -0,0 +1,247 @@
+#ifndef BOOST_ATOMIC_DETAIL_CAS64STRONG_PTR_HPP
+#define BOOST_ATOMIC_DETAIL_CAS64STRONG_PTR_HPP
+
+// Distributed under the Boost Software License, Version 1.0.
+// See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// Copyright (c) 2011 Helge Bahmann
+// Copyright (c) 2013 Tim Blechmann
+
+// Build 64-bit atomic operation on pointers from platform_cmpxchg64_strong
+// primitive. It is assumed that 64-bit loads/stores are not
+// atomic, so they are implemented through platform_load64/platform_store64.
+//
+// The reason for extracting pointer specializations to a separate header is
+// that 64-bit CAS is available on some 32-bit platforms (notably, x86).
+// On these platforms there is no need for 64-bit pointer specializations,
+// since they will never be used.
+
+#include <string.h>
+#include <cstddef>
+#include <boost/cstdint.hpp>
+#include <boost/memory_order.hpp>
+#include <boost/atomic/detail/config.hpp>
+#include <boost/atomic/detail/base.hpp>
+
+#ifdef BOOST_HAS_PRAGMA_ONCE
+#pragma once
+#endif
+
+namespace boost {
+namespace atomics {
+namespace detail {
+
+/* pointer types */
+
+template<bool Sign>
+class base_atomic<void *, void *, 8, Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef void * value_type;
+ typedef std::ptrdiff_t difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
+
+ void
+ store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ platform_fence_before_store(order);
+ platform_store64(v, &v_);
+ platform_fence_after_store(order);
+ }
+
+ value_type
+ load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ value_type v = platform_load64(&v_);
+ platform_fence_after_load(order);
+ return v;
+ }
+
+ value_type
+ exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original = load(memory_order_relaxed);
+ do {
+ } while (!compare_exchange_weak(original, v, order, memory_order_relaxed));
+ return original;
+ }
+
+ bool
+ compare_exchange_weak(
+ value_type & expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return compare_exchange_strong(expected, desired, success_order, failure_order);
+ }
+
+ bool
+ compare_exchange_strong(
+ value_type & expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ platform_fence_before(success_order);
+
+ bool success = platform_cmpxchg64_strong(expected, desired, &v_);
+
+ if (success) {
+ platform_fence_after(success_order);
+ } else {
+ platform_fence_after(failure_order);
+ }
+
+ return success;
+ }
+
+ value_type
+ fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original = load(memory_order_relaxed);
+ do {
+ } while (!compare_exchange_weak(original, (char*)original + v, order, memory_order_relaxed));
+ return original;
+ }
+
+ value_type
+ fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original = load(memory_order_relaxed);
+ do {
+ } while (!compare_exchange_weak(original, (char*)original - v, order, memory_order_relaxed));
+ return original;
+ }
+
+ bool
+ is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return true;
+ }
+
+ BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ value_type v_;
+};
+
+template<typename T, bool Sign>
+class base_atomic<T *, void *, 8, Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef T * value_type;
+ typedef std::ptrdiff_t difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
+
+ void
+ store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ platform_fence_before_store(order);
+ platform_store64(v, &v_);
+ platform_fence_after_store(order);
+ }
+
+ value_type
+ load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ value_type v = platform_load64(&v_);
+ platform_fence_after_load(order);
+ return v;
+ }
+
+ value_type
+ exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original = load(memory_order_relaxed);
+ do {
+ } while (!compare_exchange_weak(original, v, order, memory_order_relaxed));
+ return original;
+ }
+
+ bool
+ compare_exchange_weak(
+ value_type & expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return compare_exchange_strong(expected, desired, success_order, failure_order);
+ }
+
+ bool
+ compare_exchange_strong(
+ value_type & expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ platform_fence_before(success_order);
+
+ bool success = platform_cmpxchg64_strong(expected, desired, &v_);
+
+ if (success) {
+ platform_fence_after(success_order);
+ } else {
+ platform_fence_after(failure_order);
+ }
+
+ return success;
+ }
+
+ value_type
+ fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original = load(memory_order_relaxed);
+ do {
+ } while (!compare_exchange_weak(original, original + v, order, memory_order_relaxed));
+ return original;
+ }
+
+ value_type
+ fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type original = load(memory_order_relaxed);
+ do {
+ } while (!compare_exchange_weak(original, original - v, order, memory_order_relaxed));
+ return original;
+ }
+
+ bool
+ is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return true;
+ }
+
+ BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ value_type v_;
+};
+
+}
+}
+}
+
+#endif
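
The header comment above gives the reason for splitting these pointer specializations out: 64-bit CAS exists on some 32-bit targets (x86 with cmpxchg8b), but there sizeof(T*) is 4, so the 8-byte pointer specializations would never be instantiated and the header can simply be skipped. A toy model of that size-based dispatch (hypothetical names, not the actual type-classification machinery):

#include <cstddef>
#include <iostream>

// atomic<T*> selects its backend by the size of the pointer, so on a 32-bit
// build only the 4-byte specializations from cas32strong.hpp/cas32weak.hpp
// are used, even though cmpxchg8b still provides 64-bit CAS for integers.
template<std::size_t PointerSize>
struct pointer_backend     { static const char* name() { return "other"; } };
template<>
struct pointer_backend<4>  { static const char* name() { return "cas32 strong/weak"; } };
template<>
struct pointer_backend<8>  { static const char* name() { return "cas64strong-ptr"; } };

int main()
{
    std::cout << "sizeof(void*) = " << sizeof(void*)
              << ", pointer backend: " << pointer_backend<sizeof(void*)>::name() << "\n";
    return 0;
}
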
Modified: branches/release/boost/atomic/detail/cas64strong.hpp
==============================================================================
--- branches/release/boost/atomic/detail/cas64strong.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/cas64strong.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -8,17 +8,18 @@
// Copyright (c) 2011 Helge Bahmann
// Copyright (c) 2013 Tim Blechmann
-// Build 64-bit atomic operation from platform_cmpxchg64_strong
+// Build 64-bit atomic operation on integers/UDTs from platform_cmpxchg64_strong
// primitive. It is assumed that 64-bit loads/stores are not
-// atomic, so they are funnelled through cmpxchg as well.
+// atomic, so they are implemented through platform_load64/platform_store64.
+#include <string.h>
#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/base.hpp>
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
@@ -31,12 +32,17 @@
template<typename T, bool Sign>
class base_atomic<T, int, 8, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -145,203 +151,11 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
-private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
- value_type v_;
-};
-
-/* pointer types */
-
-template<bool Sign>
-class base_atomic<void *, void *, 8, Sign>
-{
- typedef base_atomic this_type;
- typedef void * value_type;
- typedef ptrdiff_t difference_type;
-public:
- BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
-
- void
- store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
- {
- platform_fence_before_store(order);
- platform_store64(v, &v_);
- platform_fence_after_store(order);
- }
-
- value_type
- load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
- {
- value_type v = platform_load64(&v_);
- platform_fence_after_load(order);
- return v;
- }
-
- value_type
- exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
- {
- value_type original = load(memory_order_relaxed);
- do {
- } while (!compare_exchange_weak(original, v, order, memory_order_relaxed));
- return original;
- }
-
- bool
- compare_exchange_weak(
- value_type & expected,
- value_type desired,
- memory_order success_order,
- memory_order failure_order) volatile BOOST_NOEXCEPT
- {
- return compare_exchange_strong(expected, desired, success_order, failure_order);
- }
-
- bool
- compare_exchange_strong(
- value_type & expected,
- value_type desired,
- memory_order success_order,
- memory_order failure_order) volatile BOOST_NOEXCEPT
- {
- platform_fence_before(success_order);
-
- bool success = platform_cmpxchg64_strong(expected, desired, &v_);
-
- if (success) {
- platform_fence_after(success_order);
- } else {
- platform_fence_after(failure_order);
- }
-
- return success;
- }
-
- value_type
- fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
- {
- value_type original = load(memory_order_relaxed);
- do {
- } while (!compare_exchange_weak(original, (char*)original + v, order, memory_order_relaxed));
- return original;
- }
-
- value_type
- fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
- {
- value_type original = load(memory_order_relaxed);
- do {
- } while (!compare_exchange_weak(original, (char*)original - v, order, memory_order_relaxed));
- return original;
- }
-
- bool
- is_lock_free(void) const volatile BOOST_NOEXCEPT
- {
- return true;
- }
-
- BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
-private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
- value_type v_;
-};
-
-template<typename T, bool Sign>
-class base_atomic<T *, void *, 8, Sign>
-{
- typedef base_atomic this_type;
- typedef T * value_type;
- typedef ptrdiff_t difference_type;
-public:
- BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
- void
- store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
- {
- platform_fence_before_store(order);
- platform_store64(v, &v_);
- platform_fence_after_store(order);
- }
-
- value_type
- load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
- {
- value_type v = platform_load64(&v_);
- platform_fence_after_load(order);
- return v;
- }
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
- value_type
- exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
- {
- value_type original = load(memory_order_relaxed);
- do {
- } while (!compare_exchange_weak(original, v, order, memory_order_relaxed));
- return original;
- }
-
- bool
- compare_exchange_weak(
- value_type & expected,
- value_type desired,
- memory_order success_order,
- memory_order failure_order) volatile BOOST_NOEXCEPT
- {
- return compare_exchange_strong(expected, desired, success_order, failure_order);
- }
-
- bool
- compare_exchange_strong(
- value_type & expected,
- value_type desired,
- memory_order success_order,
- memory_order failure_order) volatile BOOST_NOEXCEPT
- {
- platform_fence_before(success_order);
-
- bool success = platform_cmpxchg64_strong(expected, desired, &v_);
-
- if (success) {
- platform_fence_after(success_order);
- } else {
- platform_fence_after(failure_order);
- }
-
- return success;
- }
-
- value_type
- fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
- {
- value_type original = load(memory_order_relaxed);
- do {
- } while (!compare_exchange_weak(original, original + v, order, memory_order_relaxed));
- return original;
- }
-
- value_type
- fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
- {
- value_type original = load(memory_order_relaxed);
- do {
- } while (!compare_exchange_weak(original, original - v, order, memory_order_relaxed));
- return original;
- }
-
- bool
- is_lock_free(void) const volatile BOOST_NOEXCEPT
- {
- return true;
- }
-
- BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -350,15 +164,20 @@
template<typename T, bool Sign>
class base_atomic<T, void, 8, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint64_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
void
store(value_type const& value, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -430,9 +249,11 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
Modified: branches/release/boost/atomic/detail/config.hpp
==============================================================================
--- branches/release/boost/atomic/detail/config.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/config.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -9,11 +9,7 @@
#include <boost/config.hpp>
-#if (defined(_MSC_VER) && (_MSC_VER >= 1020)) || defined(__GNUC__) || defined(BOOST_CLANG) || defined(BOOST_INTEL) || defined(__COMO__) || defined(__DMC__)
-#define BOOST_ATOMIC_HAS_PRAGMA_ONCE
-#endif
-
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
Modified: branches/release/boost/atomic/detail/gcc-alpha.hpp
==============================================================================
--- branches/release/boost/atomic/detail/gcc-alpha.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/gcc-alpha.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -12,7 +12,7 @@
#include <boost/atomic/detail/base.hpp>
#include <boost/atomic/detail/builder.hpp>
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
Modified: branches/release/boost/atomic/detail/gcc-armv6plus.hpp
==============================================================================
--- branches/release/boost/atomic/detail/gcc-armv6plus.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/gcc-armv6plus.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -10,11 +10,10 @@
// Copyright (c) 2013 Tim Blechmann
// ARM Code by Phil Endecott, based on other architectures.
-#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/atomic/detail/config.hpp>
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
@@ -222,6 +221,7 @@
return expected;
}
};
+
#define BOOST_ATOMIC_FLAG_LOCK_FREE 2
}
Copied: branches/release/boost/atomic/detail/gcc-atomic.hpp (from r85090, trunk/boost/atomic/detail/gcc-atomic.hpp)
==============================================================================
--- /dev/null 00:00:00 1970 (empty, because file is newly added)
+++ branches/release/boost/atomic/detail/gcc-atomic.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092, copy of r85090, trunk/boost/atomic/detail/gcc-atomic.hpp)
@@ -0,0 +1,1204 @@
+#ifndef BOOST_ATOMIC_DETAIL_GCC_ATOMIC_HPP
+#define BOOST_ATOMIC_DETAIL_GCC_ATOMIC_HPP
+
+// Copyright (c) 2013 Andrey Semashev
+//
+// Distributed under the Boost Software License, Version 1.0.
+// See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#include <string.h>
+#include <cstddef>
+#include <boost/cstdint.hpp>
+#include <boost/atomic/detail/config.hpp>
+
+#ifdef BOOST_HAS_PRAGMA_ONCE
+#pragma once
+#endif
+
+namespace boost {
+namespace atomics {
+namespace detail {
+
+#if (defined(__i386__) && defined(__SSE2__)) || defined(__x86_64__)
+#define BOOST_ATOMIC_X86_PAUSE() __asm__ __volatile__ ("pause\n")
+#endif
+
+#if defined(__i386__) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8)
+#define BOOST_ATOMIC_X86_HAS_CMPXCHG8B 1
+#endif
+
+#if defined(__x86_64__) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_16)
+#define BOOST_ATOMIC_X86_HAS_CMPXCHG16B 1
+#endif
+
+BOOST_FORCEINLINE BOOST_CONSTEXPR int convert_memory_order_to_gcc(memory_order order) BOOST_NOEXCEPT
+{
+ return (order == memory_order_relaxed ? __ATOMIC_RELAXED : (order == memory_order_consume ? __ATOMIC_CONSUME :
+ (order == memory_order_acquire ? __ATOMIC_ACQUIRE : (order == memory_order_release ? __ATOMIC_RELEASE :
+ (order == memory_order_acq_rel ? __ATOMIC_ACQ_REL : __ATOMIC_SEQ_CST)))));
+}
+
+} // namespace detail
+} // namespace atomics
+
+#if __GCC_ATOMIC_BOOL_LOCK_FREE == 2
+
+class atomic_flag
+{
+private:
+ atomic_flag(const atomic_flag &) /* = delete */ ;
+ atomic_flag & operator=(const atomic_flag &) /* = delete */ ;
+ bool v_;
+
+public:
+ BOOST_CONSTEXPR atomic_flag(void) BOOST_NOEXCEPT : v_(false) {}
+
+ bool test_and_set(memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_test_and_set(&v_, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ void clear(memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ __atomic_clear((bool*)&v_, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+};
+
+#define BOOST_ATOMIC_FLAG_LOCK_FREE 2
+
+#endif // __GCC_ATOMIC_BOOL_LOCK_FREE == 2
+
+} // namespace boost
+
+#include <boost/atomic/detail/base.hpp>
+
+#if !defined(BOOST_ATOMIC_FORCE_FALLBACK)
+
+#if __GCC_ATOMIC_CHAR_LOCK_FREE == 2
+#define BOOST_ATOMIC_CHAR_LOCK_FREE 2
+#endif
+#if __GCC_ATOMIC_CHAR16_T_LOCK_FREE == 2
+#define BOOST_ATOMIC_CHAR16_T_LOCK_FREE 2
+#endif
+#if __GCC_ATOMIC_CHAR32_T_LOCK_FREE == 2
+#define BOOST_ATOMIC_CHAR32_T_LOCK_FREE 2
+#endif
+#if __GCC_ATOMIC_WCHAR_T_LOCK_FREE == 2
+#define BOOST_ATOMIC_WCHAR_T_LOCK_FREE 2
+#endif
+#if __GCC_ATOMIC_SHORT_LOCK_FREE == 2
+#define BOOST_ATOMIC_SHORT_LOCK_FREE 2
+#endif
+#if __GCC_ATOMIC_INT_LOCK_FREE == 2
+#define BOOST_ATOMIC_INT_LOCK_FREE 2
+#endif
+#if __GCC_ATOMIC_LONG_LOCK_FREE == 2
+#define BOOST_ATOMIC_LONG_LOCK_FREE 2
+#endif
+#if __GCC_ATOMIC_LLONG_LOCK_FREE == 2
+#define BOOST_ATOMIC_LLONG_LOCK_FREE 2
+#endif
+#if defined(BOOST_ATOMIC_X86_HAS_CMPXCHG16B) && (defined(BOOST_HAS_INT128) || !defined(BOOST_NO_ALIGNMENT))
+#define BOOST_ATOMIC_INT128_LOCK_FREE 2
+#endif
+#if __GCC_ATOMIC_POINTER_LOCK_FREE == 2
+#define BOOST_ATOMIC_POINTER_LOCK_FREE 2
+#endif
+#if __GCC_ATOMIC_BOOL_LOCK_FREE == 2
+#define BOOST_ATOMIC_BOOL_LOCK_FREE 2
+#endif
+
+namespace boost {
+
+#define BOOST_ATOMIC_THREAD_FENCE 2
+BOOST_FORCEINLINE void atomic_thread_fence(memory_order order)
+{
+ __atomic_thread_fence(atomics::detail::convert_memory_order_to_gcc(order));
+}
+
+#define BOOST_ATOMIC_SIGNAL_FENCE 2
+BOOST_FORCEINLINE void atomic_signal_fence(memory_order order)
+{
+ __atomic_signal_fence(atomics::detail::convert_memory_order_to_gcc(order));
+}
+
+namespace atomics {
+namespace detail {
+
+#if defined(BOOST_ATOMIC_CHAR_LOCK_FREE) && BOOST_ATOMIC_CHAR_LOCK_FREE > 0
+
+template<typename T, bool Sign>
+class base_atomic<T, int, 1, Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef T value_type;
+ typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
+
+ void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ __atomic_store_n(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_load_n(&v_, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_add(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_sub(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_exchange_n(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ bool compare_exchange_strong(
+ value_type& expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange_n(&v_, &expected, desired, false,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ bool compare_exchange_weak(
+ value_type& expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange_n(&v_, &expected, desired, true,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ value_type fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_and(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_or(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_xor(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ bool is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_is_lock_free(sizeof(v_), &v_);
+ }
+
+ BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ value_type v_;
+};
+
+template<typename T, bool Sign>
+class base_atomic<T, void, 1, Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef T value_type;
+ typedef uint8_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT :
+ v_(reinterpret_cast<storage_type const&>(v))
+ {
+ }
+
+ void store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ __atomic_store(&v_, (storage_type*)&v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ value_type v;
+ __atomic_load(&v_, (storage_type*)&v, atomics::detail::convert_memory_order_to_gcc(order));
+ return v;
+ }
+
+ value_type exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type r;
+ __atomic_exchange(&v_, (storage_type*)&v, (storage_type*)&r, atomics::detail::convert_memory_order_to_gcc(order));
+ return r;
+ }
+
+ bool compare_exchange_strong(
+ value_type& expected,
+ value_type const& desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange(&v_, (storage_type*)&expected, (storage_type*)&desired, false,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ bool compare_exchange_weak(
+ value_type & expected,
+ value_type const& desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange(&v_, (storage_type*)&expected, (storage_type*)&desired, true,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ bool is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_is_lock_free(sizeof(v_), &v_);
+ }
+
+ BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ storage_type v_;
+};
+
+#endif // defined(BOOST_ATOMIC_CHAR_LOCK_FREE) && BOOST_ATOMIC_CHAR_LOCK_FREE > 0
+
+#if defined(BOOST_ATOMIC_SHORT_LOCK_FREE) && BOOST_ATOMIC_SHORT_LOCK_FREE > 0
+
+template<typename T, bool Sign>
+class base_atomic<T, int, 2, Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef T value_type;
+ typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
+
+ void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ __atomic_store_n(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_load_n(&v_, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_add(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_sub(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_exchange_n(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ bool compare_exchange_strong(
+ value_type& expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange_n(&v_, &expected, desired, false,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ bool compare_exchange_weak(
+ value_type& expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange_n(&v_, &expected, desired, true,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ value_type fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_and(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_or(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_xor(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ bool is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_is_lock_free(sizeof(v_), &v_);
+ }
+
+ BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ value_type v_;
+};
+
+template<typename T, bool Sign>
+class base_atomic<T, void, 2, Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef T value_type;
+ typedef uint16_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT :
+ v_(reinterpret_cast<storage_type const&>(v))
+ {
+ }
+
+ void store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ __atomic_store(&v_, (storage_type*)&v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ value_type v;
+ __atomic_load(&v_, (storage_type*)&v, atomics::detail::convert_memory_order_to_gcc(order));
+ return v;
+ }
+
+ value_type exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ value_type r;
+ __atomic_exchange(&v_, (storage_type*)&v, (storage_type*)&r, atomics::detail::convert_memory_order_to_gcc(order));
+ return r;
+ }
+
+ bool compare_exchange_strong(
+ value_type& expected,
+ value_type const& desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange(&v_, (storage_type*)&expected, (storage_type*)&desired, false,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ bool compare_exchange_weak(
+ value_type & expected,
+ value_type const& desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange(&v_, (storage_type*)&expected, (storage_type*)&desired, true,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ bool is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_is_lock_free(sizeof(v_), &v_);
+ }
+
+ BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ storage_type v_;
+};
+
+#endif // defined(BOOST_ATOMIC_SHORT_LOCK_FREE) && BOOST_ATOMIC_SHORT_LOCK_FREE > 0
+
+#if defined(BOOST_ATOMIC_INT_LOCK_FREE) && BOOST_ATOMIC_INT_LOCK_FREE > 0
+
+template<typename T, bool Sign>
+class base_atomic<T, int, 4, Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef T value_type;
+ typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
+
+ void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ __atomic_store_n(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_load_n(&v_, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_add(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_sub(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_exchange_n(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ bool compare_exchange_strong(
+ value_type& expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange_n(&v_, &expected, desired, false,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ bool compare_exchange_weak(
+ value_type& expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange_n(&v_, &expected, desired, true,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ value_type fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_and(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_or(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_xor(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ bool is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_is_lock_free(sizeof(v_), &v_);
+ }
+
+ BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ value_type v_;
+};
+
+template<typename T, bool Sign>
+class base_atomic<T, void, 4, Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef T value_type;
+ typedef uint32_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
+ {
+ memcpy(&v_, &v, sizeof(value_type));
+ }
+
+ void store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ storage_type tmp = 0;
+ memcpy(&tmp, &v, sizeof(value_type));
+ __atomic_store_n(&v_, tmp, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ storage_type tmp = __atomic_load_n(&v_, atomics::detail::convert_memory_order_to_gcc(order));
+ value_type v;
+ memcpy(&v, &tmp, sizeof(value_type));
+ return v;
+ }
+
+ value_type exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ storage_type tmp = 0;
+ memcpy(&tmp, &v, sizeof(value_type));
+ tmp = __atomic_exchange_n(&v_, tmp, atomics::detail::convert_memory_order_to_gcc(order));
+ value_type res;
+ memcpy(&res, &tmp, sizeof(value_type));
+ return res;
+ }
+
+ bool compare_exchange_strong(
+ value_type& expected,
+ value_type const& desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ storage_type expected_s = 0, desired_s = 0;
+ memcpy(&expected_s, &expected, sizeof(value_type));
+ memcpy(&desired_s, &desired, sizeof(value_type));
+ const bool success = __atomic_compare_exchange_n(&v_, &expected_s, desired_s, false,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ memcpy(&expected, &expected_s, sizeof(value_type));
+ return success;
+ }
+
+ bool compare_exchange_weak(
+ value_type& expected,
+ value_type const& desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ storage_type expected_s = 0, desired_s = 0;
+ memcpy(&expected_s, &expected, sizeof(value_type));
+ memcpy(&desired_s, &desired, sizeof(value_type));
+ const bool success = __atomic_compare_exchange_n(&v_, &expected_s, desired_s, true,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ memcpy(&expected, &expected_s, sizeof(value_type));
+ return success;
+ }
+
+ bool is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_is_lock_free(sizeof(v_), &v_);
+ }
+
+ BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ storage_type v_;
+};
+
+#endif // defined(BOOST_ATOMIC_INT_LOCK_FREE) && BOOST_ATOMIC_INT_LOCK_FREE > 0
+
+#if defined(BOOST_ATOMIC_LLONG_LOCK_FREE) && BOOST_ATOMIC_LLONG_LOCK_FREE > 0
+
+template<typename T, bool Sign>
+class base_atomic<T, int, 8, Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef T value_type;
+ typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
+
+ void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ __atomic_store_n(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_load_n(&v_, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_add(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_sub(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_exchange_n(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ bool compare_exchange_strong(
+ value_type& expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange_n(&v_, &expected, desired, false,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ bool compare_exchange_weak(
+ value_type& expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange_n(&v_, &expected, desired, true,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ value_type fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_and(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_or(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_xor(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ bool is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_is_lock_free(sizeof(v_), &v_);
+ }
+
+ BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ value_type v_;
+};
+
+template<typename T, bool Sign>
+class base_atomic<T, void, 8, Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef T value_type;
+ typedef uint64_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
+ {
+ memcpy(&v_, &v, sizeof(value_type));
+ }
+
+ void store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ storage_type tmp = 0;
+ memcpy(&tmp, &v, sizeof(value_type));
+ __atomic_store_n(&v_, tmp, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ storage_type tmp = __atomic_load_n(&v_, atomics::detail::convert_memory_order_to_gcc(order));
+ value_type v;
+ memcpy(&v, &tmp, sizeof(value_type));
+ return v;
+ }
+
+ value_type exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ storage_type tmp = 0;
+ memcpy(&tmp, &v, sizeof(value_type));
+ tmp = __atomic_exchange_n(&v_, tmp, atomics::detail::convert_memory_order_to_gcc(order));
+ value_type res;
+ memcpy(&res, &tmp, sizeof(value_type));
+ return res;
+ }
+
+ bool compare_exchange_strong(
+ value_type& expected,
+ value_type const& desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ storage_type expected_s = 0, desired_s = 0;
+ memcpy(&expected_s, &expected, sizeof(value_type));
+ memcpy(&desired_s, &desired, sizeof(value_type));
+ const bool success = __atomic_compare_exchange_n(&v_, &expected_s, desired_s, false,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ memcpy(&expected, &expected_s, sizeof(value_type));
+ return success;
+ }
+
+ bool compare_exchange_weak(
+ value_type& expected,
+ value_type const& desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ storage_type expected_s = 0, desired_s = 0;
+ memcpy(&expected_s, &expected, sizeof(value_type));
+ memcpy(&desired_s, &desired, sizeof(value_type));
+ const bool success = __atomic_compare_exchange_n(&v_, &expected_s, desired_s, true,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ memcpy(&expected, &expected_s, sizeof(value_type));
+ return success;
+ }
+
+ bool is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_is_lock_free(sizeof(v_), &v_);
+ }
+
+ BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ storage_type v_;
+};
+
+#endif // defined(BOOST_ATOMIC_LLONG_LOCK_FREE) && BOOST_ATOMIC_LLONG_LOCK_FREE > 0
+
+#if defined(BOOST_ATOMIC_INT128_LOCK_FREE) && BOOST_ATOMIC_INT128_LOCK_FREE > 0
+
+template<typename T, bool Sign>
+class base_atomic<T, int, 16, Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef T value_type;
+ typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
+
+ void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ __atomic_store_n(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_load_n(&v_, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_add(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_sub(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_exchange_n(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ bool compare_exchange_strong(
+ value_type& expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange_n(&v_, &expected, desired, false,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ bool compare_exchange_weak(
+ value_type& expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange_n(&v_, &expected, desired, true,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ value_type fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_and(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_or(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_xor(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ bool is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_is_lock_free(sizeof(v_), &v_);
+ }
+
+ BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ value_type v_;
+};
+
+#if defined(BOOST_HAS_INT128)
+
+typedef boost::uint128_type storage128_type;
+
+#else // defined(BOOST_HAS_INT128)
+
+struct BOOST_ALIGNMENT(16) storage128_type
+{
+ uint64_t data[2];
+};
+
+inline bool operator== (storage128_type const& left, storage128_type const& right)
+{
+ return left.data[0] == right.data[0] && left.data[1] == right.data[1];
+}
+inline bool operator!= (storage128_type const& left, storage128_type const& right)
+{
+ return !(left == right);
+}
+
+#endif // defined(BOOST_HAS_INT128)
+
+template<typename T, bool Sign>
+class base_atomic<T, void, 16, Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef T value_type;
+ typedef storage128_type storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
+ {
+ memcpy(&v_, &v, sizeof(value_type));
+ }
+
+ void store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ storage_type tmp = 0;
+ memcpy(&tmp, &v, sizeof(value_type));
+ __atomic_store_n(&v_, tmp, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ storage_type tmp = __atomic_load_n(&v_, atomics::detail::convert_memory_order_to_gcc(order));
+ value_type v;
+ memcpy(&v, &tmp, sizeof(value_type));
+ return v;
+ }
+
+ value_type exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ storage_type tmp = 0;
+ memcpy(&tmp, &v, sizeof(value_type));
+ tmp = __atomic_exchange_n(&v_, tmp, atomics::detail::convert_memory_order_to_gcc(order));
+ value_type res;
+ memcpy(&res, &tmp, sizeof(value_type));
+ return res;
+ }
+
+ bool compare_exchange_strong(
+ value_type& expected,
+ value_type const& desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ storage_type expected_s = 0, desired_s = 0;
+ memcpy(&expected_s, &expected, sizeof(value_type));
+ memcpy(&desired_s, &desired, sizeof(value_type));
+ const bool success = __atomic_compare_exchange_n(&v_, &expected_s, desired_s, false,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ memcpy(&expected, &expected_s, sizeof(value_type));
+ return success;
+ }
+
+ bool compare_exchange_weak(
+ value_type& expected,
+ value_type const& desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ storage_type expected_s = 0, desired_s = 0;
+ memcpy(&expected_s, &expected, sizeof(value_type));
+ memcpy(&desired_s, &desired, sizeof(value_type));
+ const bool success = __atomic_compare_exchange_n(&v_, &expected_s, desired_s, true,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ memcpy(&expected, &expected_s, sizeof(value_type));
+ return success;
+ }
+
+ bool is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_is_lock_free(sizeof(v_), &v_);
+ }
+
+ BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ storage_type v_;
+};
+
+#endif // defined(BOOST_ATOMIC_INT128_LOCK_FREE) && BOOST_ATOMIC_INT128_LOCK_FREE > 0
+
+
+/* pointers */
+
+#if defined(BOOST_ATOMIC_POINTER_LOCK_FREE) && BOOST_ATOMIC_POINTER_LOCK_FREE > 0
+
+template<typename T, bool Sign>
+class base_atomic<T*, void*, sizeof(void*), Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef T* value_type;
+ typedef std::ptrdiff_t difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
+
+ void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ __atomic_store_n(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_load_n(&v_, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_add(&v_, v * sizeof(T), atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_sub(&v_, v * sizeof(T), atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_exchange_n(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ bool compare_exchange_strong(
+ value_type& expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange_n(&v_, &expected, desired, false,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ bool compare_exchange_weak(
+ value_type& expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange_n(&v_, &expected, desired, true,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ bool is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_is_lock_free(sizeof(v_), &v_);
+ }
+
+ BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ value_type v_;
+};
+
+template<bool Sign>
+class base_atomic<void*, void*, sizeof(void*), Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef void* value_type;
+ typedef std::ptrdiff_t difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
+
+ void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ __atomic_store_n(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_load_n(&v_, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_add(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_fetch_sub(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_exchange_n(&v_, v, atomics::detail::convert_memory_order_to_gcc(order));
+ }
+
+ bool compare_exchange_strong(
+ value_type& expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange_n(&v_, &expected, desired, false,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ bool compare_exchange_weak(
+ value_type& expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return __atomic_compare_exchange_n(&v_, &expected, desired, true,
+ atomics::detail::convert_memory_order_to_gcc(success_order),
+ atomics::detail::convert_memory_order_to_gcc(failure_order));
+ }
+
+ bool is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return __atomic_is_lock_free(sizeof(v_), &v_);
+ }
+
+ BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ value_type v_;
+};
+
+#endif // defined(BOOST_ATOMIC_POINTER_LOCK_FREE) && BOOST_ATOMIC_POINTER_LOCK_FREE > 0
+
+} // namespace detail
+} // namespace atomics
+} // namespace boost
+
+#endif // !defined(BOOST_ATOMIC_FORCE_FALLBACK)
+
+#endif // BOOST_ATOMIC_DETAIL_GCC_ATOMIC_HPP
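The new gcc-atomic.hpp backend above maps each operation directly onto the GCC 4.7+ __atomic builtins, converting boost::memory_order to the matching __ATOMIC_* constant and, for non-integral types, round-tripping the payload through an unsigned storage word with memcpy so the builtins always see a supported integral size. A minimal standalone sketch of that storage-word pattern (illustrative names only, not part of the commit; assumes a GCC-compatible compiler and payloads of at most 4 bytes):

#include <string.h>
#include <stdint.h>

// Sketch of the "pack into a storage word, operate with __atomic_*" pattern
// used by the base_atomic<T, void, N, Sign> specializations. packed_atomic is
// an illustrative name; only payloads of up to 4 bytes are handled here.
template<typename T>
class packed_atomic
{
public:
    explicit packed_atomic(T const& v) : v_(0)
    {
        memcpy(&v_, &v, sizeof(T)); // zero-pad the word, then copy the payload in
    }

    void store(T const& v)
    {
        uint32_t tmp = 0;
        memcpy(&tmp, &v, sizeof(T));
        __atomic_store_n(&v_, tmp, __ATOMIC_SEQ_CST);
    }

    T load() const
    {
        uint32_t tmp = __atomic_load_n(&v_, __ATOMIC_SEQ_CST);
        T v;
        memcpy(&v, &tmp, sizeof(T));
        return v;
    }

    bool compare_exchange_strong(T& expected, T const& desired)
    {
        uint32_t e = 0, d = 0;
        memcpy(&e, &expected, sizeof(T));
        memcpy(&d, &desired, sizeof(T));
        const bool ok = __atomic_compare_exchange_n(&v_, &e, d, false,
            __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
        memcpy(&expected, &e, sizeof(T)); // report back the value actually observed
        return ok;
    }

private:
    uint32_t v_;
};

For a type whose size matches a lock-free integral width (e.g. packed_atomic<float>) this compiles down to the same instructions as the integer specializations; the committed code additionally covers 1-, 2-, 8- and, behind BOOST_ATOMIC_INT128_LOCK_FREE, 16-byte storage.
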
Modified: branches/release/boost/atomic/detail/gcc-cas.hpp
==============================================================================
--- branches/release/boost/atomic/detail/gcc-cas.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/gcc-cas.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -11,11 +11,10 @@
#ifndef BOOST_ATOMIC_DETAIL_GENERIC_CAS_HPP
#define BOOST_ATOMIC_DETAIL_GENERIC_CAS_HPP
-#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/atomic/detail/config.hpp>
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
@@ -25,16 +24,17 @@
inline void
atomic_thread_fence(memory_order order)
{
- switch(order) {
- case memory_order_relaxed:
- break;
- case memory_order_release:
- case memory_order_consume:
- case memory_order_acquire:
- case memory_order_acq_rel:
- case memory_order_seq_cst:
- __sync_synchronize();
- break;
+ switch(order)
+ {
+ case memory_order_relaxed:
+ break;
+ case memory_order_release:
+ case memory_order_consume:
+ case memory_order_acquire:
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ __sync_synchronize();
+ break;
}
}
@@ -56,16 +56,17 @@
inline void
platform_fence_before_store(memory_order order)
{
- switch(order) {
- case memory_order_relaxed:
- case memory_order_acquire:
- case memory_order_consume:
- break;
- case memory_order_release:
- case memory_order_acq_rel:
- case memory_order_seq_cst:
- __sync_synchronize();
- break;
+ switch(order)
+ {
+ case memory_order_relaxed:
+ case memory_order_acquire:
+ case memory_order_consume:
+ break;
+ case memory_order_release:
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ __sync_synchronize();
+ break;
}
}
@@ -79,16 +80,17 @@
inline void
platform_fence_after_load(memory_order order)
{
- switch(order) {
- case memory_order_relaxed:
- case memory_order_release:
- break;
- case memory_order_consume:
- case memory_order_acquire:
- case memory_order_acq_rel:
- case memory_order_seq_cst:
- __sync_synchronize();
- break;
+ switch(order)
+ {
+ case memory_order_relaxed:
+ case memory_order_release:
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ __sync_synchronize();
+ break;
}
}
@@ -132,6 +134,7 @@
return expected;
}
};
+
#define BOOST_ATOMIC_FLAG_LOCK_FREE 2
}
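The gcc-cas.hpp hunks above are mostly a reindentation of the fence helpers plus the include and pragma fixes; the mapping itself is unchanged: with only the legacy __sync builtins available, every ordering stronger than relaxed falls back to a full barrier. A sketch of that mapping (the enum is a local stand-in for boost::memory_order, kept only to make the sketch self-contained; assumes __sync_synchronize is available):

// Local stand-in for boost::memory_order.
enum sketch_memory_order
{
    sketch_relaxed, sketch_consume, sketch_acquire,
    sketch_release, sketch_acq_rel, sketch_seq_cst
};

// Before a store: release, acq_rel and seq_cst need a barrier; the __sync API
// only offers a full one.
inline void sketch_fence_before_store(sketch_memory_order order)
{
    if (order == sketch_release || order == sketch_acq_rel || order == sketch_seq_cst)
        __sync_synchronize();
}

// After a load: everything except relaxed and release needs the barrier.
inline void sketch_fence_after_load(sketch_memory_order order)
{
    if (order != sketch_relaxed && order != sketch_release)
        __sync_synchronize();
}
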
Modified: branches/release/boost/atomic/detail/gcc-ppc.hpp
==============================================================================
--- branches/release/boost/atomic/detail/gcc-ppc.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/gcc-ppc.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -8,11 +8,12 @@
// See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
+#include <string.h>
#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/atomic/detail/config.hpp>
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
@@ -65,40 +66,43 @@
inline void
ppc_fence_before(memory_order order)
{
- switch(order) {
- case memory_order_release:
- case memory_order_acq_rel:
+ switch(order)
+ {
+ case memory_order_release:
+ case memory_order_acq_rel:
#if defined(__powerpc64__)
- __asm__ __volatile__ ("lwsync" ::: "memory");
- break;
+ __asm__ __volatile__ ("lwsync" ::: "memory");
+ break;
#endif
- case memory_order_seq_cst:
- __asm__ __volatile__ ("sync" ::: "memory");
- default:;
+ case memory_order_seq_cst:
+ __asm__ __volatile__ ("sync" ::: "memory");
+ default:;
}
}
inline void
ppc_fence_after(memory_order order)
{
- switch(order) {
- case memory_order_acquire:
- case memory_order_acq_rel:
- case memory_order_seq_cst:
- __asm__ __volatile__ ("isync");
- case memory_order_consume:
- __asm__ __volatile__ ("" ::: "memory");
- default:;
+ switch(order)
+ {
+ case memory_order_acquire:
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ __asm__ __volatile__ ("isync");
+ case memory_order_consume:
+ __asm__ __volatile__ ("" ::: "memory");
+ default:;
}
}
inline void
ppc_fence_after_store(memory_order order)
{
- switch(order) {
- case memory_order_seq_cst:
- __asm__ __volatile__ ("sync");
- default:;
+ switch(order)
+ {
+ case memory_order_seq_cst:
+ __asm__ __volatile__ ("sync");
+ default:;
}
}
@@ -199,13 +203,18 @@
template<typename T>
class base_atomic<T, int, 1, true>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef int32_t storage_type;
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -411,22 +420,29 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T>
class base_atomic<T, int, 1, false>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint32_t storage_type;
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -633,22 +649,29 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T>
class base_atomic<T, int, 2, true>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef int32_t storage_type;
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -855,22 +878,29 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T>
class base_atomic<T, int, 2, false>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint32_t storage_type;
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1077,21 +1107,28 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, int, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1292,9 +1329,11 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -1303,12 +1342,17 @@
template<typename T, bool Sign>
class base_atomic<T, int, 8, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1509,9 +1553,11 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -1524,12 +1570,17 @@
template<bool Sign>
class base_atomic<void *, void *, 4, Sign>
{
+private:
typedef base_atomic this_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
typedef void * value_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1681,21 +1732,28 @@
}
BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
template<typename T, bool Sign>
class base_atomic<T *, void *, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T * value_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1849,9 +1907,11 @@
}
BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -1860,12 +1920,17 @@
template<bool Sign>
class base_atomic<void *, void *, 8, Sign>
{
+private:
typedef base_atomic this_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
typedef void * value_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -2017,21 +2082,28 @@
}
BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
template<typename T, bool Sign>
class base_atomic<T *, void *, 8, Sign>
{
+private:
typedef base_atomic this_type;
typedef T * value_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -2185,9 +2257,11 @@
}
BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -2198,15 +2272,20 @@
template<typename T, bool Sign>
class base_atomic<T, void, 1, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -2340,26 +2419,32 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, void, 2, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
-
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
@@ -2492,26 +2577,32 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, void, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
-
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
@@ -2644,9 +2735,11 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
@@ -2655,17 +2748,21 @@
template<typename T, bool Sign>
class base_atomic<T, void, 8, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint64_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
-
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
@@ -2798,11 +2895,14 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
+
#endif
}
@@ -2812,19 +2912,20 @@
inline void
atomic_thread_fence(memory_order order)
{
- switch(order) {
- case memory_order_acquire:
- __asm__ __volatile__ ("isync" ::: "memory");
- break;
- case memory_order_release:
+ switch(order)
+ {
+ case memory_order_acquire:
+ __asm__ __volatile__ ("isync" ::: "memory");
+ break;
+ case memory_order_release:
#if defined(__powerpc64__)
- __asm__ __volatile__ ("lwsync" ::: "memory");
- break;
+ __asm__ __volatile__ ("lwsync" ::: "memory");
+ break;
#endif
- case memory_order_acq_rel:
- case memory_order_seq_cst:
- __asm__ __volatile__ ("sync" ::: "memory");
- default:;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ __asm__ __volatile__ ("sync" ::: "memory");
+ default:;
}
}
@@ -2832,14 +2933,15 @@
inline void
atomic_signal_fence(memory_order order)
{
- switch(order) {
- case memory_order_acquire:
- case memory_order_release:
- case memory_order_acq_rel:
- case memory_order_seq_cst:
- __asm__ __volatile__ ("" ::: "memory");
- break;
- default:;
+ switch(order)
+ {
+ case memory_order_acquire:
+ case memory_order_release:
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ __asm__ __volatile__ ("" ::: "memory");
+ break;
+ default:;
}
}
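On PowerPC (gcc-ppc.hpp above), the reformatted fence helpers keep the usual mapping: release ordering emits lwsync on powerpc64 (a full sync on 32-bit targets), seq_cst emits sync, and acquire semantics come from an isync placed after the lwarx/stwcx. sequence. A sketch of just the fence placement (function names are illustrative; assumes a GCC-compatible compiler targeting powerpc64):

// Fence placement on PowerPC, per the ppc_fence_* helpers above.
inline void ppc_release_fence_sketch()
{
    __asm__ __volatile__ ("lwsync" ::: "memory"); // order prior accesses before the following store
}

inline void ppc_acquire_fence_sketch()
{
    // Effective only when paired with a dependent branch after the ll/sc
    // sequence, which is how the committed code uses it.
    __asm__ __volatile__ ("isync" ::: "memory");
}

inline void ppc_seq_cst_fence_sketch()
{
    __asm__ __volatile__ ("sync" ::: "memory"); // heavyweight full barrier
}
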
Modified: branches/release/boost/atomic/detail/gcc-sparcv9.hpp
==============================================================================
--- branches/release/boost/atomic/detail/gcc-sparcv9.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/gcc-sparcv9.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -8,11 +8,12 @@
// See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
+#include <string.h>
#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/atomic/detail/config.hpp>
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
@@ -23,53 +24,56 @@
inline void
platform_fence_before(memory_order order)
{
- switch(order) {
- case memory_order_relaxed:
- case memory_order_acquire:
- case memory_order_consume:
- break;
- case memory_order_release:
- case memory_order_acq_rel:
- __asm__ __volatile__ ("membar #StoreStore | #LoadStore" ::: "memory");
- /* release */
- break;
- case memory_order_seq_cst:
- __asm__ __volatile__ ("membar #Sync" ::: "memory");
- /* seq */
- break;
+ switch(order)
+ {
+ case memory_order_relaxed:
+ case memory_order_acquire:
+ case memory_order_consume:
+ break;
+ case memory_order_release:
+ case memory_order_acq_rel:
+ __asm__ __volatile__ ("membar #StoreStore | #LoadStore" ::: "memory");
+ /* release */
+ break;
+ case memory_order_seq_cst:
+ __asm__ __volatile__ ("membar #Sync" ::: "memory");
+ /* seq */
+ break;
}
}
inline void
platform_fence_after(memory_order order)
{
- switch(order) {
- case memory_order_relaxed:
- case memory_order_release:
- break;
- case memory_order_acquire:
- case memory_order_acq_rel:
- __asm__ __volatile__ ("membar #LoadLoad | #LoadStore" ::: "memory");
- /* acquire */
- break;
- case memory_order_consume:
- /* consume */
- break;
- case memory_order_seq_cst:
- __asm__ __volatile__ ("membar #Sync" ::: "memory");
- /* seq */
- break;
- default:;
+ switch(order)
+ {
+ case memory_order_relaxed:
+ case memory_order_release:
+ break;
+ case memory_order_acquire:
+ case memory_order_acq_rel:
+ __asm__ __volatile__ ("membar #LoadLoad | #LoadStore" ::: "memory");
+ /* acquire */
+ break;
+ case memory_order_consume:
+ /* consume */
+ break;
+ case memory_order_seq_cst:
+ __asm__ __volatile__ ("membar #Sync" ::: "memory");
+ /* seq */
+ break;
+ default:;
}
}
inline void
platform_fence_after_store(memory_order order)
{
- switch(order) {
- case memory_order_seq_cst:
- __asm__ __volatile__ ("membar #Sync" ::: "memory");
- default:;
+ switch(order)
+ {
+ case memory_order_seq_cst:
+ __asm__ __volatile__ ("membar #Sync" ::: "memory");
+ default:;
}
}
@@ -141,24 +145,25 @@
inline void
atomic_thread_fence(memory_order order)
{
- switch(order) {
- case memory_order_relaxed:
- break;
- case memory_order_release:
- __asm__ __volatile__ ("membar #StoreStore | #LoadStore" ::: "memory");
- break;
- case memory_order_acquire:
- __asm__ __volatile__ ("membar #LoadLoad | #LoadStore" ::: "memory");
- break;
- case memory_order_acq_rel:
- __asm__ __volatile__ ("membar #LoadLoad | #LoadStore | #StoreStore" ::: "memory");
- break;
- case memory_order_consume:
- break;
- case memory_order_seq_cst:
- __asm__ __volatile__ ("membar #Sync" ::: "memory");
- break;
- default:;
+ switch(order)
+ {
+ case memory_order_relaxed:
+ break;
+ case memory_order_release:
+ __asm__ __volatile__ ("membar #StoreStore | #LoadStore" ::: "memory");
+ break;
+ case memory_order_acquire:
+ __asm__ __volatile__ ("membar #LoadLoad | #LoadStore" ::: "memory");
+ break;
+ case memory_order_acq_rel:
+ __asm__ __volatile__ ("membar #LoadLoad | #LoadStore | #StoreStore" ::: "memory");
+ break;
+ case memory_order_consume:
+ break;
+ case memory_order_seq_cst:
+ __asm__ __volatile__ ("membar #Sync" ::: "memory");
+ break;
+ default:;
}
}
@@ -177,13 +182,18 @@
template<typename T>
class base_atomic<T, int, 1, true>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
typedef int32_t storage_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -291,22 +301,29 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T>
class base_atomic<T, int, 1, false>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -414,22 +431,29 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T>
class base_atomic<T, int, 2, true>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
typedef int32_t storage_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -537,22 +561,29 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T>
class base_atomic<T, int, 2, false>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -660,21 +691,28 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, int, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -780,9 +818,11 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -791,12 +831,17 @@
template<bool Sign>
class base_atomic<void *, void *, 4, Sign>
{
+private:
typedef base_atomic this_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
typedef void * value_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -876,21 +921,27 @@
BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
template<typename T, bool Sign>
class base_atomic<T *, void *, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T * value_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -972,9 +1023,11 @@
}
BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -983,15 +1036,20 @@
template<typename T, bool Sign>
class base_atomic<T, void, 1, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1064,24 +1122,31 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, void, 2, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1154,24 +1219,31 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, void, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1244,9 +1316,11 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
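
A note for readers skimming the diff: the recurring change in the class bodies above (and in the files below) replaces the hand-written "private copy constructor / assignment" idiom with the BOOST_DEFAULTED_FUNCTION and BOOST_DELETED_FUNCTION helpers, so that C++11 compilers see real = default / = delete declarations while C++03 compilers keep the old behaviour. A simplified, self-contained approximation of what the pattern amounts to; the macro bodies below are assumptions modelled on Boost.Config, not the library's exact definitions:

// Simplified stand-ins for BOOST_DEFAULTED_FUNCTION / BOOST_DELETED_FUNCTION.
#if __cplusplus >= 201103L
#  define DEFAULTED_FUNCTION(fun, body) fun = default;
#  define DELETED_FUNCTION(fun) fun = delete;
#else
#  define DEFAULTED_FUNCTION(fun, body) fun body
#  define DELETED_FUNCTION(fun) private: fun;
#endif

class example_atomic
{
public:
    DEFAULTED_FUNCTION(example_atomic(), {})
    explicit example_atomic(int v) : v_(v) {}

    // Non-copyable, matching the pattern applied to base_atomic above.
    DELETED_FUNCTION(example_atomic(example_atomic const&))
    DELETED_FUNCTION(example_atomic& operator=(example_atomic const&))

private:
    int v_;
};

int main()
{
    example_atomic a;      // defaulted constructor
    example_atomic b(42);  // value constructor
    (void)a; (void)b;
    // example_atomic c(b); // would not compile: copying is disabled
}

Either way, copy attempts fail at compile time, and the default constructor can be trivial where the language allows it.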
Modified: branches/release/boost/atomic/detail/gcc-x86.hpp
==============================================================================
--- branches/release/boost/atomic/detail/gcc-x86.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/gcc-x86.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -8,11 +8,12 @@
// See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
+#include <string.h>
#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/atomic/detail/config.hpp>
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
@@ -37,6 +38,10 @@
#define BOOST_ATOMIC_X86_HAS_CMPXCHG8B 1
#endif
+#if defined(__x86_64__) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_16)
+#define BOOST_ATOMIC_X86_HAS_CMPXCHG16B 1
+#endif
+
inline void
platform_fence_before(memory_order order)
{
@@ -209,8 +214,10 @@
#if defined(__x86_64__) || defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B)
#define BOOST_ATOMIC_LLONG_LOCK_FREE 2
-#else
-#define BOOST_ATOMIC_LLONG_LOCK_FREE 0
+#endif
+
+#if defined(BOOST_ATOMIC_X86_HAS_CMPXCHG16B) && (defined(BOOST_HAS_INT128) || !defined(BOOST_NO_ALIGNMENT))
+#define BOOST_ATOMIC_INT128_LOCK_FREE 2
#endif
#define BOOST_ATOMIC_POINTER_LOCK_FREE 2
@@ -257,12 +264,17 @@
template<typename T, bool Sign>
class base_atomic<T, int, 1, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -287,9 +299,12 @@
fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"lock ; xaddb %0, %1"
: "+q" (v), "+m" (v_)
+ :
+ : "cc"
);
platform_fence_after(order);
return v;
@@ -305,7 +320,8 @@
exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"xchgb %0, %1"
: "+q" (v), "+m" (v_)
);
@@ -322,12 +338,15 @@
{
value_type previous = expected;
platform_fence_before(success_order);
- __asm__ (
- "lock ; cmpxchgb %2, %1"
- : "+a" (previous), "+m" (v_)
+ bool success;
+ __asm__ __volatile__
+ (
+ "lock ; cmpxchgb %3, %1\n\t"
+ "sete %2"
+ : "+a" (previous), "+m" (v_), "=q" (success)
: "q" (desired)
+ : "cc"
);
- bool success = (previous == expected);
if (success)
platform_fence_after(success_order);
else
@@ -350,7 +369,7 @@
fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
value_type tmp = load(memory_order_relaxed);
- for(; !compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed);)
+ while (!compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed))
{
BOOST_ATOMIC_X86_PAUSE();
}
@@ -361,7 +380,7 @@
fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
value_type tmp = load(memory_order_relaxed);
- for (; !compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed);)
+ while (!compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed))
{
BOOST_ATOMIC_X86_PAUSE();
}
@@ -372,7 +391,7 @@
fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
value_type tmp = load(memory_order_relaxed);
- for (; !compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed);)
+ while (!compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed))
{
BOOST_ATOMIC_X86_PAUSE();
}
@@ -386,21 +405,28 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, int, 2, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -425,9 +451,12 @@
fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"lock ; xaddw %0, %1"
: "+q" (v), "+m" (v_)
+ :
+ : "cc"
);
platform_fence_after(order);
return v;
@@ -443,7 +472,8 @@
exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"xchgw %0, %1"
: "+q" (v), "+m" (v_)
);
@@ -460,12 +490,15 @@
{
value_type previous = expected;
platform_fence_before(success_order);
- __asm__ (
- "lock ; cmpxchgw %2, %1"
- : "+a" (previous), "+m" (v_)
+ bool success;
+ __asm__ __volatile__
+ (
+ "lock ; cmpxchgw %3, %1\n\t"
+ "sete %2"
+ : "+a" (previous), "+m" (v_), "=q" (success)
: "q" (desired)
+ : "cc"
);
- bool success = (previous == expected);
if (success)
platform_fence_after(success_order);
else
@@ -488,7 +521,7 @@
fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
value_type tmp = load(memory_order_relaxed);
- for (; !compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed);)
+ while (!compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed))
{
BOOST_ATOMIC_X86_PAUSE();
}
@@ -499,7 +532,7 @@
fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
value_type tmp = load(memory_order_relaxed);
- for (; !compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed);)
+ while (!compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed))
{
BOOST_ATOMIC_X86_PAUSE();
}
@@ -510,7 +543,7 @@
fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
value_type tmp = load(memory_order_relaxed);
- for (; !compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed);)
+ while (!compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed))
{
BOOST_ATOMIC_X86_PAUSE();
}
@@ -524,21 +557,28 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, int, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -563,9 +603,12 @@
fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"lock ; xaddl %0, %1"
: "+r" (v), "+m" (v_)
+ :
+ : "cc"
);
platform_fence_after(order);
return v;
@@ -581,7 +624,8 @@
exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"xchgl %0, %1"
: "+r" (v), "+m" (v_)
);
@@ -598,12 +642,15 @@
{
value_type previous = expected;
platform_fence_before(success_order);
- __asm__ (
- "lock ; cmpxchgl %2, %1"
- : "+a" (previous), "+m" (v_)
- : "r" (desired)
+ bool success;
+ __asm__ __volatile__
+ (
+ "lock ; cmpxchgl %3, %1\n\t"
+ "sete %2"
+ : "+a,a" (previous), "+m,m" (v_), "=q,m" (success)
+ : "r,r" (desired)
+ : "cc"
);
- bool success = (previous == expected);
if (success)
platform_fence_after(success_order);
else
@@ -626,7 +673,7 @@
fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
value_type tmp = load(memory_order_relaxed);
- for (; !compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed);)
+ while (!compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed))
{
BOOST_ATOMIC_X86_PAUSE();
}
@@ -637,7 +684,7 @@
fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
value_type tmp = load(memory_order_relaxed);
- for (; !compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed);)
+ while (!compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed))
{
BOOST_ATOMIC_X86_PAUSE();
}
@@ -648,7 +695,7 @@
fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
value_type tmp = load(memory_order_relaxed);
- for (; !compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed);)
+ while (!compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed))
{
BOOST_ATOMIC_X86_PAUSE();
}
@@ -662,9 +709,11 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -672,12 +721,17 @@
template<typename T, bool Sign>
class base_atomic<T, int, 8, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -702,9 +756,12 @@
fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"lock ; xaddq %0, %1"
: "+r" (v), "+m" (v_)
+ :
+ : "cc"
);
platform_fence_after(order);
return v;
@@ -720,7 +777,8 @@
exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"xchgq %0, %1"
: "+r" (v), "+m" (v_)
);
@@ -737,12 +795,15 @@
{
value_type previous = expected;
platform_fence_before(success_order);
- __asm__ (
- "lock ; cmpxchgq %2, %1"
- : "+a" (previous), "+m" (v_)
- : "r" (desired)
+ bool success;
+ __asm__ __volatile__
+ (
+ "lock ; cmpxchgq %3, %1\n\t"
+ "sete %2"
+ : "+a,a" (previous), "+m,m" (v_), "=q,m" (success)
+ : "r,r" (desired)
+ : "cc"
);
- bool success = (previous == expected);
if (success)
platform_fence_after(success_order);
else
@@ -765,7 +826,7 @@
fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
value_type tmp = load(memory_order_relaxed);
- for (; !compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed);)
+ while (!compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed))
{
BOOST_ATOMIC_X86_PAUSE();
}
@@ -776,7 +837,7 @@
fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
value_type tmp = load(memory_order_relaxed);
- for (; !compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed);)
+ while (!compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed))
{
BOOST_ATOMIC_X86_PAUSE();
}
@@ -787,7 +848,7 @@
fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
value_type tmp = load(memory_order_relaxed);
- for (; !compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed);)
+ while (!compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed))
{
BOOST_ATOMIC_X86_PAUSE();
}
@@ -801,9 +862,11 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -811,17 +874,23 @@
/* pointers */
-#if !defined(__x86_64__)
+// NOTE: the x32 target is still regarded as x86_64 and can only be detected by the size of pointers
+#if !defined(__x86_64__) || (defined(__SIZEOF_POINTER__) && __SIZEOF_POINTER__ == 4)
template<bool Sign>
class base_atomic<void *, void *, 4, Sign>
{
+private:
typedef base_atomic this_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
typedef void * value_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -844,7 +913,8 @@
value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"xchgl %0, %1"
: "+r" (v), "+m" (v_)
);
@@ -858,12 +928,15 @@
{
value_type previous = expected;
platform_fence_before(success_order);
- __asm__ (
- "lock ; cmpxchgl %2, %1"
- : "+a" (previous), "+m" (v_)
- : "r" (desired)
+ bool success;
+ __asm__ __volatile__
+ (
+ "lock ; cmpxchgl %3, %1\n\t"
+ "sete %2"
+ : "+a,a" (previous), "+m,m" (v_), "=q,m" (success)
+ : "r,r" (desired)
+ : "cc"
);
- bool success = (previous == expected);
if (success)
platform_fence_after(success_order);
else
@@ -889,10 +962,13 @@
fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
platform_fence_before(order);
- __asm__ (
- "lock ; xaddl %0, %1"
- : "+r" (v), "+m" (v_)
- );
+ __asm__ __volatile__
+ (
+ "lock ; xaddl %0, %1"
+ : "+r" (v), "+m" (v_)
+ :
+ : "cc"
+ );
platform_fence_after(order);
return reinterpret_cast<value_type>(v);
}
@@ -904,21 +980,28 @@
}
BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
template<typename T, bool Sign>
class base_atomic<T *, void *, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T * value_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -943,7 +1026,8 @@
exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"xchgl %0, %1"
: "+r" (v), "+m" (v_)
);
@@ -960,12 +1044,15 @@
{
value_type previous = expected;
platform_fence_before(success_order);
- __asm__ (
- "lock ; cmpxchgl %2, %1"
- : "+a" (previous), "+m" (v_)
- : "r" (desired)
+ bool success;
+ __asm__ __volatile__
+ (
+ "lock ; cmpxchgl %3, %1\n\t"
+ "sete %2"
+ : "+a,a" (previous), "+m,m" (v_), "=q,m" (success)
+ : "r,r" (desired)
+ : "cc"
);
- bool success = (previous == expected);
if (success)
platform_fence_after(success_order);
else
@@ -989,9 +1076,12 @@
{
v = v * sizeof(*v_);
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"lock ; xaddl %0, %1"
: "+r" (v), "+m" (v_)
+ :
+ : "cc"
);
platform_fence_after(order);
return reinterpret_cast<value_type>(v);
@@ -1010,9 +1100,11 @@
}
BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -1021,12 +1113,17 @@
template<bool Sign>
class base_atomic<void *, void *, 8, Sign>
{
+private:
typedef base_atomic this_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
typedef void * value_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1049,7 +1146,8 @@
value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"xchgq %0, %1"
: "+r" (v), "+m" (v_)
);
@@ -1063,12 +1161,15 @@
{
value_type previous = expected;
platform_fence_before(success_order);
- __asm__ (
- "lock ; cmpxchgq %2, %1"
- : "+a" (previous), "+m" (v_)
- : "r" (desired)
+ bool success;
+ __asm__ __volatile__
+ (
+ "lock ; cmpxchgq %3, %1\n\t"
+ "sete %2"
+ : "+a,a" (previous), "+m,m" (v_), "=q,m" (success)
+ : "r,r" (desired)
+ : "cc"
);
- bool success = (previous == expected);
if (success)
platform_fence_after(success_order);
else
@@ -1094,9 +1195,12 @@
fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"lock ; xaddq %0, %1"
: "+r" (v), "+m" (v_)
+ :
+ : "cc"
);
platform_fence_after(order);
return reinterpret_cast<value_type>(v);
@@ -1109,21 +1213,28 @@
}
BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
template<typename T, bool Sign>
class base_atomic<T *, void *, 8, Sign>
{
+private:
typedef base_atomic this_type;
typedef T * value_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1148,7 +1259,8 @@
exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"xchgq %0, %1"
: "+r" (v), "+m" (v_)
);
@@ -1165,12 +1277,15 @@
{
value_type previous = expected;
platform_fence_before(success_order);
- __asm__ (
- "lock ; cmpxchgq %2, %1"
- : "+a" (previous), "+m" (v_)
- : "r" (desired)
+ bool success;
+ __asm__ __volatile__
+ (
+ "lock ; cmpxchgq %3, %1\n\t"
+ "sete %2"
+ : "+a,a" (previous), "+m,m" (v_), "=q,m" (success)
+ : "r,r" (desired)
+ : "cc"
);
- bool success = (previous == expected);
if (success)
platform_fence_after(success_order);
else
@@ -1194,9 +1309,12 @@
{
v = v * sizeof(*v_);
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"lock ; xaddq %0, %1"
: "+r" (v), "+m" (v_)
+ :
+ : "cc"
);
platform_fence_after(order);
return reinterpret_cast<value_type>(v);
@@ -1215,9 +1333,11 @@
}
BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -1226,14 +1346,20 @@
template<typename T, bool Sign>
class base_atomic<T, void, 1, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint8_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT :
v_(reinterpret_cast<storage_type const&>(v))
- {}
- base_atomic(void) {}
+ {
+ }
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1264,7 +1390,8 @@
storage_type tmp;
memcpy(&tmp, &v, sizeof(value_type));
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"xchgb %0, %1"
: "+q" (tmp), "+m" (v_)
);
@@ -1286,12 +1413,15 @@
memcpy(&desired_s, &desired, sizeof(value_type));
storage_type previous_s = expected_s;
platform_fence_before(success_order);
- __asm__ (
- "lock ; cmpxchgb %2, %1"
- : "+a" (previous_s), "+m" (v_)
+ bool success;
+ __asm__ __volatile__
+ (
+ "lock ; cmpxchgb %3, %1\n\t"
+ "sete %2"
+ : "+a" (previous_s), "+m" (v_), "=q" (success)
: "q" (desired_s)
+ : "cc"
);
- bool success = (previous_s == expected_s);
if (success)
platform_fence_after(success_order);
else
@@ -1317,23 +1447,31 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, void, 2, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint16_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT :
v_(reinterpret_cast<storage_type const&>(v))
- {}
- base_atomic(void) {}
+ {
+ }
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1364,7 +1502,8 @@
storage_type tmp;
memcpy(&tmp, &v, sizeof(value_type));
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"xchgw %0, %1"
: "+q" (tmp), "+m" (v_)
);
@@ -1386,12 +1525,15 @@
memcpy(&desired_s, &desired, sizeof(value_type));
storage_type previous_s = expected_s;
platform_fence_before(success_order);
- __asm__ (
- "lock ; cmpxchgw %2, %1"
- : "+a" (previous_s), "+m" (v_)
+ bool success;
+ __asm__ __volatile__
+ (
+ "lock ; cmpxchgw %3, %1\n\t"
+ "sete %2"
+ : "+a" (previous_s), "+m" (v_), "=q" (success)
: "q" (desired_s)
+ : "cc"
);
- bool success = (previous_s == expected_s);
if (success)
platform_fence_after(success_order);
else
@@ -1417,24 +1559,31 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, void, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1465,7 +1614,8 @@
storage_type tmp = 0;
memcpy(&tmp, &v, sizeof(value_type));
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"xchgl %0, %1"
: "+q" (tmp), "+m" (v_)
);
@@ -1487,12 +1637,15 @@
memcpy(&desired_s, &desired, sizeof(value_type));
storage_type previous_s = expected_s;
platform_fence_before(success_order);
- __asm__ (
- "lock ; cmpxchgl %2, %1"
- : "+a" (previous_s), "+m" (v_)
- : "q" (desired_s)
+ bool success;
+ __asm__ __volatile__
+ (
+ "lock ; cmpxchgl %3, %1\n\t"
+ "sete %2"
+ : "+a,a" (previous_s), "+m,m" (v_), "=q,m" (success)
+ : "q,q" (desired_s)
+ : "cc"
);
- bool success = (previous_s == expected_s);
if (success)
platform_fence_after(success_order);
else
@@ -1518,9 +1671,11 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
@@ -1528,15 +1683,20 @@
template<typename T, bool Sign>
class base_atomic<T, void, 8, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint64_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1567,7 +1727,8 @@
storage_type tmp = 0;
memcpy(&tmp, &v, sizeof(value_type));
platform_fence_before(order);
- __asm__ (
+ __asm__ __volatile__
+ (
"xchgq %0, %1"
: "+q" (tmp), "+m" (v_)
);
@@ -1589,12 +1750,15 @@
memcpy(&desired_s, &desired, sizeof(value_type));
storage_type previous_s = expected_s;
platform_fence_before(success_order);
- __asm__ (
- "lock ; cmpxchgq %2, %1"
- : "+a" (previous_s), "+m" (v_)
- : "q" (desired_s)
+ bool success;
+ __asm__ __volatile__
+ (
+ "lock ; cmpxchgq %3, %1\n\t"
+ "sete %2"
+ : "+a,a" (previous_s), "+m,m" (v_), "=q,m" (success)
+ : "q,q" (desired_s)
+ : "cc"
);
- bool success = (previous_s == expected_s);
if (success)
platform_fence_after(success_order);
else
@@ -1620,9 +1784,11 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
#endif
@@ -1640,7 +1806,6 @@
return result;
#else
uint32_t scratch;
- T prev = expected;
/* Make sure ebx is saved and restored properly in case
this object is compiled as "position independent". Since
programmers on x86 tend to forget specifying -DPIC or
@@ -1655,16 +1820,18 @@
In theory, could push/pop ebx onto/off the stack, but movs
to a prepared stack slot turn out to be faster. */
- __asm__ __volatile__ (
- "movl %%ebx, %1\n"
- "movl %2, %%ebx\n"
- "lock; cmpxchg8b 0(%4)\n"
- "movl %1, %%ebx\n"
- : "=A" (prev), "=m" (scratch)
- : "D" ((uint32_t)desired), "c" ((uint32_t)(desired >> 32)), "S" (ptr), "0" (prev)
- : "memory");
- bool success = (prev == expected);
- expected = prev;
+ bool success;
+ __asm__ __volatile__
+ (
+ "movl %%ebx, %[scratch]\n\t"
+ "movl %[desired_lo], %%ebx\n\t"
+ "lock; cmpxchg8b %[dest]\n\t"
+ "movl %[scratch], %%ebx\n\t"
+ "sete %[success]"
+ : "+A,A,A,A,A,A" (expected), [dest] "+m,m,m,m,m,m" (*ptr), [scratch] "=m,m,m,m,m,m" (scratch), [success] "=q,m,q,m,q,m" (success)
+ : [desired_lo] "S,S,D,D,m,m" ((uint32_t)desired), "c,c,c,c,c,c" ((uint32_t)(desired >> 32))
+ : "memory", "cc"
+ );
return success;
#endif
}
@@ -1686,11 +1853,11 @@
#if defined(__SSE2__)
__asm__ __volatile__
(
- "movq %1, %%xmm0\n\t"
- "movq %%xmm0, %0\n\t"
+ "movq %1, %%xmm4\n\t"
+ "movq %%xmm4, %0\n\t"
: "=m" (*ptr)
: "m" (value)
- : "memory", "xmm0"
+ : "memory", "xmm4"
);
#else
__asm__ __volatile__
@@ -1705,11 +1872,21 @@
}
else
{
- T expected = *ptr;
- while (!platform_cmpxchg64_strong(expected, value, ptr))
- {
- BOOST_ATOMIC_X86_PAUSE();
- }
+ uint32_t scratch;
+ __asm__ __volatile__
+ (
+ "movl %%ebx, %[scratch]\n\t"
+ "movl %[value_lo], %%ebx\n\t"
+ "movl 0(%[dest]), %%eax\n\t"
+ "movl 4(%[dest]), %%edx\n\t"
+ ".align 16\n\t"
+ "1: lock; cmpxchg8b 0(%[dest])\n\t"
+ "jne 1b\n\t"
+ "movl %[scratch], %%ebx"
+ : [scratch] "=m,m" (scratch)
+ : [value_lo] "a,a" ((uint32_t)value), "c,c" ((uint32_t)(value >> 32)), [dest] "D,S" (ptr)
+ : "memory", "cc", "edx"
+ );
}
}
@@ -1717,18 +1894,18 @@
inline T
platform_load64(const volatile T * ptr) BOOST_NOEXCEPT
{
- T value = T();
+ T value;
if (((uint32_t)ptr & 0x00000007) == 0)
{
#if defined(__SSE2__)
__asm__ __volatile__
(
- "movq %1, %%xmm0\n\t"
- "movq %%xmm0, %0\n\t"
+ "movq %1, %%xmm4\n\t"
+ "movq %%xmm4, %0\n\t"
: "=m" (value)
: "m" (*ptr)
- : "memory", "xmm0"
+ : "memory", "xmm4"
);
#else
__asm__ __volatile__
@@ -1744,7 +1921,16 @@
else
{
// We don't care about the comparison result here; the previous value will be stored into value anyway.
- platform_cmpxchg64_strong(value, value, const_cast<volatile T*>(ptr));
+ // Also we don't care about the ebx and ecx values; they just have to be equal to eax and edx before cmpxchg8b.
+ __asm__ __volatile__
+ (
+ "movl %%ebx, %%eax\n\t"
+ "movl %%ecx, %%edx\n\t"
+ "lock; cmpxchg8b %[dest]"
+ : "=&A" (value)
+ : [dest] "m" (*ptr)
+ : "cc"
+ );
}
return value;
@@ -1752,6 +1938,66 @@
#endif
+#if defined(BOOST_ATOMIC_INT128_LOCK_FREE) && BOOST_ATOMIC_INT128_LOCK_FREE > 0
+
+template<typename T>
+inline bool
+platform_cmpxchg128_strong(T& expected, T desired, volatile T* ptr) BOOST_NOEXCEPT
+{
+ uint64_t const* p_desired = (uint64_t const*)&desired;
+ bool success;
+ __asm__ __volatile__
+ (
+ "lock; cmpxchg16b %[dest]\n\t"
+ "sete %[success]"
+ : "+A,A" (expected), [dest] "+m,m" (*ptr), [success] "=q,m" (success)
+ : "b,b" (p_desired[0]), "c,c" (p_desired[1])
+ : "memory", "cc"
+ );
+ return success;
+}
+
+template<typename T>
+inline void
+platform_store128(T value, volatile T* ptr) BOOST_NOEXCEPT
+{
+ uint64_t const* p_value = (uint64_t const*)&value;
+ __asm__ __volatile__
+ (
+ "movq 0(%[dest]), %%rax\n\t"
+ "movq 8(%[dest]), %%rdx\n\t"
+ ".align 16\n\t"
+ "1: lock; cmpxchg16b 0(%[dest])\n\t"
+ "jne 1b"
+ :
+ : "b" (p_value[0]), "c" (p_value[1]), [dest] "r" (ptr)
+ : "memory", "cc", "rax", "rdx"
+ );
+}
+
+template<typename T>
+inline T
+platform_load128(const volatile T* ptr) BOOST_NOEXCEPT
+{
+ T value;
+
+ // We don't care about the comparison result here; the previous value will be stored into value anyway.
+ // Also we don't care about the rbx and rcx values; they just have to be equal to rax and rdx before cmpxchg16b.
+ __asm__ __volatile__
+ (
+ "movq %%rbx, %%rax\n\t"
+ "movq %%rcx, %%rdx\n\t"
+ "lock; cmpxchg16b %[dest]"
+ : "=&A" (value)
+ : [dest] "m" (*ptr)
+ : "cc"
+ );
+
+ return value;
+}
+
+#endif // defined(BOOST_ATOMIC_INT128_LOCK_FREE) && BOOST_ATOMIC_INT128_LOCK_FREE > 0
+
}
}
}
@@ -1761,6 +2007,11 @@
#include <boost/atomic/detail/cas64strong.hpp>
#endif
+/* pull in 128-bit atomic type using cmpxchg16b above */
+#if defined(BOOST_ATOMIC_INT128_LOCK_FREE) && BOOST_ATOMIC_INT128_LOCK_FREE > 0
+#include <boost/atomic/detail/cas128strong.hpp>
+#endif
+
#endif /* !defined(BOOST_ATOMIC_FORCE_FALLBACK) */
#endif
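
The compare_exchange rewrites in gcc-x86.hpp above all follow the same pattern: instead of deducing success by re-comparing the previous value in C++, the result is taken straight from the ZF flag that cmpxchg sets, via sete, and the condition-code clobber is declared explicitly. A standalone sketch of that pattern for a 32-bit CAS (GCC/Clang inline asm; the helper name cas32 is illustrative, not part of the library):

#include <stdint.h>
#include <stdio.h>

// "lock cmpxchg + sete" sketch: success comes from ZF, not a re-comparison.
static bool cas32(volatile uint32_t* dest, uint32_t& expected, uint32_t desired)
{
    bool success;
    __asm__ __volatile__
    (
        "lock; cmpxchgl %3, %1\n\t"
        "sete %2"
        : "+a" (expected), "+m" (*dest), "=q" (success)
        : "r" (desired)
        : "cc"
    );
    return success;
}

int main()
{
    volatile uint32_t v = 1;
    uint32_t expected = 1;
    bool ok = cas32(&v, expected, 2);   // succeeds, v becomes 2
    printf("ok=%d v=%u expected=%u\n", ok, (unsigned)v, (unsigned)expected);

    expected = 1;
    ok = cas32(&v, expected, 3);        // fails, expected is updated to 2
    printf("ok=%d v=%u expected=%u\n", ok, (unsigned)v, (unsigned)expected);
}

On failure, cmpxchg loads the current value into eax, so expected is updated in place, exactly as compare_exchange_strong requires.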
Modified: branches/release/boost/atomic/detail/generic-cas.hpp
==============================================================================
--- branches/release/boost/atomic/detail/generic-cas.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/generic-cas.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -14,7 +14,7 @@
#include <boost/atomic/detail/base.hpp>
#include <boost/atomic/detail/builder.hpp>
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
Modified: branches/release/boost/atomic/detail/interlocked.hpp
==============================================================================
--- branches/release/boost/atomic/detail/interlocked.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/interlocked.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -10,7 +10,7 @@
#include <boost/atomic/detail/config.hpp>
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
@@ -43,6 +43,11 @@
#define BOOST_ATOMIC_INTERLOCKED_OR(dest, arg) _InterlockedOr((long*)(dest), (long)(arg))
#define BOOST_ATOMIC_INTERLOCKED_XOR(dest, arg) _InterlockedXor((long*)(dest), (long)(arg))
+#if (defined(_M_IX86) && _M_IX86 >= 500) || defined(_M_AMD64) || defined(_M_IA64)
+#pragma intrinsic(_InterlockedCompareExchange64)
+#define BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(dest, exchange, compare) _InterlockedCompareExchange64((__int64*)(dest), (__int64)(exchange), (__int64)(compare))
+#endif
+
#if _MSC_VER >= 1600
// MSVC 2010 and later provide intrinsics for 8 and 16 bit integers.
@@ -81,14 +86,12 @@
#if defined(_M_AMD64) || defined(_M_IA64)
-#pragma intrinsic(_InterlockedCompareExchange64)
#pragma intrinsic(_InterlockedExchangeAdd64)
#pragma intrinsic(_InterlockedExchange64)
#pragma intrinsic(_InterlockedAnd64)
#pragma intrinsic(_InterlockedOr64)
#pragma intrinsic(_InterlockedXor64)
-#define BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(dest, exchange, compare) _InterlockedCompareExchange64((__int64*)(dest), (__int64)(exchange), (__int64)(compare))
#define BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(dest, addend) _InterlockedExchangeAdd64((__int64*)(dest), (__int64)(addend))
#define BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(dest, newval) _InterlockedExchange64((__int64*)(dest), (__int64)(newval))
#define BOOST_ATOMIC_INTERLOCKED_AND64(dest, arg) _InterlockedAnd64((__int64*)(dest), (__int64)(arg))
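
Moving the _InterlockedCompareExchange64 mapping out of the 64-bit-only block makes a lock-free 64-bit CAS available to 32-bit MSVC targets as well (Pentium and later, i.e. wherever cmpxchg8b exists). A minimal MSVC-only sketch of the kind of CAS loop this enables; the helper name fetch_add64 is illustrative, not the library's API:

#include <intrin.h>

#pragma intrinsic(_InterlockedCompareExchange64)

__int64 fetch_add64(__int64 volatile* dest, __int64 arg)
{
    __int64 old_val = *dest;
    for (;;)
    {
        // Try to replace old_val with old_val + arg; the intrinsic returns the
        // value that was actually in *dest, so a mismatch means we must retry.
        __int64 prev = _InterlockedCompareExchange64(dest, old_val + arg, old_val);
        if (prev == old_val)
            return prev;
        old_val = prev;
    }
}

The initial plain read of *dest may be torn on 32-bit targets, but that only costs an extra iteration: the intrinsic returns the true current value, which becomes the next comparand.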
Modified: branches/release/boost/atomic/detail/linux-arm.hpp
==============================================================================
--- branches/release/boost/atomic/detail/linux-arm.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/linux-arm.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -36,7 +36,7 @@
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
@@ -163,6 +163,7 @@
return expected;
}
};
+
#define BOOST_ATOMIC_FLAG_LOCK_FREE 2
}
Modified: branches/release/boost/atomic/detail/lockpool.hpp
==============================================================================
--- branches/release/boost/atomic/detail/lockpool.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/lockpool.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -12,7 +12,7 @@
#include <boost/thread/mutex.hpp>
#endif
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
@@ -31,9 +31,6 @@
private:
lock_type& mtx_;
- scoped_lock(scoped_lock const&) /* = delete */;
- scoped_lock& operator=(scoped_lock const&) /* = delete */;
-
public:
explicit
scoped_lock(const volatile void * addr) : mtx_(get_lock_for(addr))
@@ -44,6 +41,9 @@
{
mtx_.unlock();
}
+
+ BOOST_DELETED_FUNCTION(scoped_lock(scoped_lock const&))
+ BOOST_DELETED_FUNCTION(scoped_lock& operator=(scoped_lock const&))
};
private:
@@ -61,10 +61,6 @@
{
private:
atomic_flag& flag_;
- uint8_t padding[128 - sizeof(atomic_flag)];
-
- scoped_lock(const scoped_lock &) /* = delete */;
- scoped_lock& operator=(const scoped_lock &) /* = delete */;
public:
explicit
@@ -82,6 +78,9 @@
{
flag_.clear(memory_order_release);
}
+
+ BOOST_DELETED_FUNCTION(scoped_lock(const scoped_lock &))
+ BOOST_DELETED_FUNCTION(scoped_lock& operator=(const scoped_lock &))
};
private:
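
For context, the scoped_lock classes above implement the usual lock-pool scheme for non-lock-free atomics: the object's address is hashed to select one of a fixed set of locks, and the lock is held for the duration of the emulated operation. A rough, self-contained C++11 sketch of the idea, using std::mutex and invented names (pool_size, get_lock_for) purely for illustration:

#include <mutex>
#include <cstddef>

namespace sketch {

static const std::size_t pool_size = 41;   // any small fixed size works
static std::mutex pool[pool_size];

inline std::mutex& get_lock_for(const volatile void* addr)
{
    std::size_t h = reinterpret_cast<std::size_t>(addr);
    return pool[(h >> 4) % pool_size];      // drop low bits shared by nearby objects
}

class scoped_lock
{
public:
    explicit scoped_lock(const volatile void* addr) : mtx_(get_lock_for(addr))
    {
        mtx_.lock();
    }
    ~scoped_lock()
    {
        mtx_.unlock();
    }

    scoped_lock(scoped_lock const&) = delete;
    scoped_lock& operator=(scoped_lock const&) = delete;

private:
    std::mutex& mtx_;
};

} // namespace sketch

The real lockpool uses either a Boost mutex or a spinlock built on atomic_flag, but the address-to-lock mapping and the RAII guard are the same shape.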
Modified: branches/release/boost/atomic/detail/platform.hpp
==============================================================================
--- branches/release/boost/atomic/detail/platform.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/platform.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -11,11 +11,17 @@
#include <boost/atomic/detail/config.hpp>
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
-#if defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))
+// The Intel compiler does not support __atomic* intrinsics properly, although it defines them (tested with 13.0.1 and 13.1.1 on Linux)
+#if (defined(__GNUC__) && ((__GNUC__ * 100 + __GNUC_MINOR__) >= 407) && !defined(BOOST_INTEL_CXX_VERSION))\
+ || (defined(BOOST_CLANG) && ((__clang_major__ * 100 + __clang_minor__) >= 302))
+
+ #include <boost/atomic/detail/gcc-atomic.hpp>
+
+#elif defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))
#include <boost/atomic/detail/gcc-x86.hpp>
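
The new first branch prefers the gcc-atomic.hpp backend, which is layered over the __atomic intrinsics that GCC gained in 4.7 and Clang in 3.2 (and which current Intel compilers define but do not implement correctly, hence the exclusion). For reference, a small sketch of the intrinsics that backend is built around; the wrapper names here are illustrative, not the library's internal API:

#include <stdint.h>

// Thin wrappers over the GCC/Clang __atomic builtins (GCC >= 4.7, Clang >= 3.2).
inline uint64_t load_seq_cst(const volatile uint64_t* p)
{
    return __atomic_load_n(p, __ATOMIC_SEQ_CST);
}

inline bool cas_strong(volatile uint64_t* p, uint64_t& expected, uint64_t desired)
{
    // On failure the current value is written back into 'expected'.
    return __atomic_compare_exchange_n(p, &expected, desired,
                                       false /* strong */,
                                       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

inline uint64_t fetch_add_seq_cst(volatile uint64_t* p, uint64_t v)
{
    return __atomic_fetch_add(p, v, __ATOMIC_SEQ_CST);
}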
Modified: branches/release/boost/atomic/detail/type-classification.hpp
==============================================================================
--- branches/release/boost/atomic/detail/type-classification.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/type-classification.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -10,7 +10,7 @@
#include <boost/atomic/detail/config.hpp>
#include <boost/type_traits/is_integral.hpp>
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
@@ -36,7 +36,7 @@
enum _
{
size = sizeof(T),
- value = (size == 3 ? 4 : (size == 5 || size == 6 || size == 7 ? 8 : size))
+ value = (size == 3 ? 4 : (size >= 5 && size <= 7 ? 8 : (size >= 9 && size <= 15 ? 16 : size)))
};
};
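
The widened expression rounds odd-sized payloads up to the next supported storage width, now including the 128-bit case introduced by this commit: 3 bytes map to 4, 5..7 to 8, and 9..15 to 16. The same rule restated as a standalone C++11 helper with a few checks (illustrative only, not the library's trait):

#include <cstddef>

constexpr std::size_t rounded_storage_size(std::size_t size)
{
    return size == 3 ? 4
         : (size >= 5 && size <= 7)  ? 8
         : (size >= 9 && size <= 15) ? 16   // new in this commit: pad up to 128 bits
         : size;
}

static_assert(rounded_storage_size(3)  == 4,  "3-byte payloads use 32-bit storage");
static_assert(rounded_storage_size(6)  == 8,  "5..7-byte payloads use 64-bit storage");
static_assert(rounded_storage_size(12) == 16, "9..15-byte payloads use 128-bit storage");
static_assert(rounded_storage_size(8)  == 8,  "exact power-of-two sizes are kept as-is");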
Modified: branches/release/boost/atomic/detail/windows.hpp
==============================================================================
--- branches/release/boost/atomic/detail/windows.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/boost/atomic/detail/windows.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -9,13 +9,14 @@
// See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
+#include <string.h>
#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/type_traits/make_signed.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/interlocked.hpp>
-#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
+#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
@@ -33,6 +34,10 @@
#define BOOST_ATOMIC_X86_PAUSE()
#endif
+#if defined(_M_IX86) && _M_IX86 >= 500
+#define BOOST_ATOMIC_X86_HAS_CMPXCHG8B 1
+#endif
+
// Define hardware barriers
#if defined(_MSC_VER) && (defined(_M_AMD64) || (defined(_M_IX86) && defined(_M_IX86_FP) && _M_IX86_FP >= 2))
extern "C" void _mm_mfence(void);
@@ -179,7 +184,7 @@
#define BOOST_ATOMIC_SHORT_LOCK_FREE 2
#define BOOST_ATOMIC_INT_LOCK_FREE 2
#define BOOST_ATOMIC_LONG_LOCK_FREE 2
-#if defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64)
+#if defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B) || defined(_M_AMD64) || defined(_M_IA64)
#define BOOST_ATOMIC_LLONG_LOCK_FREE 2
#else
#define BOOST_ATOMIC_LLONG_LOCK_FREE 0
@@ -200,6 +205,7 @@
template<typename T, bool Sign>
class base_atomic<T, int, 1, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
#ifdef BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8
@@ -208,9 +214,13 @@
typedef uint32_t storage_type;
#endif
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT: v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -373,9 +383,11 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
@@ -386,6 +398,7 @@
template<typename T, bool Sign>
class base_atomic<T, int, 2, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
#ifdef BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16
@@ -394,9 +407,13 @@
typedef uint32_t storage_type;
#endif
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT: v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -559,22 +576,29 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, int, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef value_type storage_type;
typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT: v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -710,179 +734,31 @@
}
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
-private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
- storage_type v_;
-};
-
-#if defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64)
-
-template<typename T, bool Sign>
-class base_atomic<T, int, 8, Sign>
-{
- typedef base_atomic this_type;
- typedef T value_type;
- typedef value_type storage_type;
- typedef T difference_type;
-public:
- BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT: v_(v) {}
- base_atomic(void) {}
-
- void
- store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
- {
- if (order != memory_order_seq_cst) {
- platform_fence_before(order);
- v_ = static_cast< storage_type >(v);
- } else {
- exchange(v, order);
- }
- }
-
- value_type
- load(memory_order order = memory_order_seq_cst)const volatile BOOST_NOEXCEPT
- {
- value_type v = static_cast< value_type >(v_);
- platform_fence_after_load(order);
- return v;
- }
- value_type
- fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
- {
- platform_fence_before(order);
- v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&v_, v));
- platform_fence_after(order);
- return v;
- }
-
- value_type
- fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
- {
- typedef typename make_signed< value_type >::type signed_value_type;
- return fetch_add(static_cast< value_type >(-static_cast< signed_value_type >(v)), order);
- }
-
- value_type
- exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
- {
- platform_fence_before(order);
- v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&v_, v));
- platform_fence_after(order);
- return v;
- }
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
- bool
- compare_exchange_strong(
- value_type & expected,
- value_type desired,
- memory_order success_order,
- memory_order failure_order) volatile BOOST_NOEXCEPT
- {
- value_type previous = expected;
- platform_fence_before(success_order);
- value_type oldval = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&v_, desired, previous));
- bool success = (previous == oldval);
- if (success)
- platform_fence_after(success_order);
- else
- platform_fence_after(failure_order);
- expected = oldval;
- return success;
- }
-
- bool
- compare_exchange_weak(
- value_type & expected,
- value_type desired,
- memory_order success_order,
- memory_order failure_order) volatile BOOST_NOEXCEPT
- {
- return compare_exchange_strong(expected, desired, success_order, failure_order);
- }
-
- value_type
- fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
- {
-#if defined(BOOST_ATOMIC_INTERLOCKED_AND64)
- platform_fence_before(order);
- v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&v_, v));
- platform_fence_after(order);
- return v;
-#else
- value_type tmp = load(memory_order_relaxed);
- for (; !compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed);)
- {
- BOOST_ATOMIC_X86_PAUSE();
- }
- return tmp;
-#endif
- }
-
- value_type
- fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
- {
-#if defined(BOOST_ATOMIC_INTERLOCKED_OR64)
- platform_fence_before(order);
- v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&v_, v));
- platform_fence_after(order);
- return v;
-#else
- value_type tmp = load(memory_order_relaxed);
- for (; !compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed);)
- {
- BOOST_ATOMIC_X86_PAUSE();
- }
- return tmp;
-#endif
- }
-
- value_type
- fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
- {
-#if defined(BOOST_ATOMIC_INTERLOCKED_XOR64)
- platform_fence_before(order);
- v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&v_, v));
- platform_fence_after(order);
- return v;
-#else
- value_type tmp = load(memory_order_relaxed);
- for (; !compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed);)
- {
- BOOST_ATOMIC_X86_PAUSE();
- }
- return tmp;
-#endif
- }
-
- bool
- is_lock_free(void)const volatile BOOST_NOEXCEPT
- {
- return true;
- }
-
- BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
-#endif // defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64)
-
// MSVC 2012 fails to recognize sizeof(T) as a constant expression in template specializations
enum msvc_sizeof_pointer_workaround { sizeof_pointer = sizeof(void*) };
template<bool Sign>
class base_atomic<void*, void*, sizeof_pointer, Sign>
{
+private:
typedef base_atomic this_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
typedef void* value_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT: v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -955,21 +831,28 @@
}
BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
template<typename T, bool Sign>
class base_atomic<T*, void*, sizeof_pointer, Sign>
{
+private:
typedef base_atomic this_type;
typedef T* value_type;
- typedef ptrdiff_t difference_type;
+ typedef std::ptrdiff_t difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT: v_(v) {}
- base_atomic(void) {}
void
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1051,9 +934,11 @@
}
BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
value_type v_;
};
@@ -1061,6 +946,7 @@
template<typename T, bool Sign>
class base_atomic<T, void, 1, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
#ifdef BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8
@@ -1068,7 +954,13 @@
#else
typedef uint32_t storage_type;
#endif
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+
#ifdef BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8
BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(reinterpret_cast< storage_type const& >(v))
{
@@ -1079,7 +971,6 @@
memcpy(&v_, &v, sizeof(value_type));
}
#endif
- base_atomic(void) {}
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1163,15 +1054,18 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, void, 2, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
#ifdef BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16
@@ -1179,7 +1073,13 @@
#else
typedef uint32_t storage_type;
#endif
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+
#ifdef BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16
BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(reinterpret_cast< storage_type const& >(v))
{
@@ -1191,8 +1091,6 @@
}
#endif
- base_atomic(void) {}
-
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
{
@@ -1275,24 +1173,31 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, void, 4, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint32_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
explicit base_atomic(value_type const& v) : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1362,32 +1267,197 @@
}
bool
- is_lock_free(void)const volatile BOOST_NOEXCEPT
+ is_lock_free(void) const volatile BOOST_NOEXCEPT
{
return true;
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
-#if defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64)
+#if defined(_M_AMD64) || defined(_M_IA64)
+
+template<typename T, bool Sign>
+class base_atomic<T, int, 8, Sign>
+{
+private:
+ typedef base_atomic this_type;
+ typedef T value_type;
+ typedef value_type storage_type;
+ typedef T difference_type;
+
+protected:
+ typedef value_type value_arg_type;
+
+public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
+ BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT: v_(v) {}
+
+ void
+ store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ if (order != memory_order_seq_cst) {
+ platform_fence_before(order);
+ v_ = static_cast< storage_type >(v);
+ } else {
+ exchange(v, order);
+ }
+ }
+
+ value_type
+ load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
+ {
+ value_type v = static_cast< value_type >(v_);
+ platform_fence_after_load(order);
+ return v;
+ }
+
+ value_type
+ fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ platform_fence_before(order);
+ v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&v_, v));
+ platform_fence_after(order);
+ return v;
+ }
+
+ value_type
+ fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ typedef typename make_signed< value_type >::type signed_value_type;
+ return fetch_add(static_cast< value_type >(-static_cast< signed_value_type >(v)), order);
+ }
+
+ value_type
+ exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+ platform_fence_before(order);
+ v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&v_, v));
+ platform_fence_after(order);
+ return v;
+ }
+
+ bool
+ compare_exchange_strong(
+ value_type & expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ value_type previous = expected;
+ platform_fence_before(success_order);
+ value_type oldval = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&v_, desired, previous));
+ bool success = (previous == oldval);
+ if (success)
+ platform_fence_after(success_order);
+ else
+ platform_fence_after(failure_order);
+ expected = oldval;
+ return success;
+ }
+
+ bool
+ compare_exchange_weak(
+ value_type & expected,
+ value_type desired,
+ memory_order success_order,
+ memory_order failure_order) volatile BOOST_NOEXCEPT
+ {
+ return compare_exchange_strong(expected, desired, success_order, failure_order);
+ }
+
+ value_type
+ fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+#if defined(BOOST_ATOMIC_INTERLOCKED_AND64)
+ platform_fence_before(order);
+ v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&v_, v));
+ platform_fence_after(order);
+ return v;
+#else
+ value_type tmp = load(memory_order_relaxed);
+ for (; !compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed);)
+ {
+ BOOST_ATOMIC_X86_PAUSE();
+ }
+ return tmp;
+#endif
+ }
+
+ value_type
+ fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+#if defined(BOOST_ATOMIC_INTERLOCKED_OR64)
+ platform_fence_before(order);
+ v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&v_, v));
+ platform_fence_after(order);
+ return v;
+#else
+ value_type tmp = load(memory_order_relaxed);
+ for (; !compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed);)
+ {
+ BOOST_ATOMIC_X86_PAUSE();
+ }
+ return tmp;
+#endif
+ }
+
+ value_type
+ fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
+ {
+#if defined(BOOST_ATOMIC_INTERLOCKED_XOR64)
+ platform_fence_before(order);
+ v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&v_, v));
+ platform_fence_after(order);
+ return v;
+#else
+ value_type tmp = load(memory_order_relaxed);
+ for (; !compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed);)
+ {
+ BOOST_ATOMIC_X86_PAUSE();
+ }
+ return tmp;
+#endif
+ }
+
+ bool
+ is_lock_free(void) const volatile BOOST_NOEXCEPT
+ {
+ return true;
+ }
+
+ BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
+private:
+ storage_type v_;
+};
template<typename T, bool Sign>
class base_atomic<T, void, 8, Sign>
{
+private:
typedef base_atomic this_type;
typedef T value_type;
typedef uint64_t storage_type;
+
+protected:
+ typedef value_type const& value_arg_type;
+
public:
+ BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
explicit base_atomic(value_type const& v) : v_(0)
{
memcpy(&v_, &v, sizeof(value_type));
}
- base_atomic(void) {}
void
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
@@ -1463,18 +1533,144 @@
}
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
+
+ BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
+ BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
+
private:
- base_atomic(const base_atomic &) /* = delete */ ;
- void operator=(const base_atomic &) /* = delete */ ;
storage_type v_;
};
-#endif // defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64)
+#elif defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B)
+
+template<typename T>
+inline bool
+platform_cmpxchg64_strong(T & expected, T desired, volatile T * p) BOOST_NOEXCEPT
+{
+#if defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64)
+ const T oldval = BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(p, desired, expected);
+ const bool result = (oldval == expected);
+ expected = oldval;
+ return result;
+#else
+ bool result;
+ __asm
+ {
+ mov edi, p
+ mov esi, expected
+ mov ebx, dword ptr [desired]
+ mov ecx, dword ptr [desired + 4]
+ mov eax, dword ptr [esi]
+ mov edx, dword ptr [esi + 4]
+ lock cmpxchg8b qword ptr [edi]
+ mov dword ptr [esi], eax
+ mov dword ptr [esi + 4], edx
+ sete result
+ };
+ return result;
+#endif
+}
+
+// Intel 64 and IA-32 Architectures Software Developer's Manual, Volume 3A, 8.1.1. Guaranteed Atomic Operations:
+//
+// The Pentium processor (and newer processors since) guarantees that the following additional memory operations will always be carried out atomically:
+// * Reading or writing a quadword aligned on a 64-bit boundary
+//
+// Luckily, the memory is almost always 8-byte aligned in our case because atomic<> uses 64-bit native types for storage and dynamic memory allocations
+// have at least 8-byte alignment. The only unfortunate case is when the atomic is placed on the stack and is not 8-byte aligned (like on 32-bit Windows).
+
+template<typename T>
+inline void
+platform_store64(T value, volatile T * p) BOOST_NOEXCEPT
+{
+ if (((uint32_t)p & 0x00000007) == 0)
+ {
+#if defined(_M_IX86_FP) && _M_IX86_FP >= 2
+ __asm
+ {
+ mov edx, p
+ movq xmm4, value
+ movq qword ptr [edx], xmm4
+ };
+#else
+ __asm
+ {
+ mov edx, p
+ fild value
+ fistp qword ptr [edx]
+ };
+#endif
+ }
+ else
+ {
+ __asm
+ {
+ mov edi, p
+ mov ebx, dword ptr [value]
+ mov ecx, dword ptr [value + 4]
+ mov eax, dword ptr [edi]
+ mov edx, dword ptr [edi + 4]
+ align 16
+again:
+ lock cmpxchg8b qword ptr [edi]
+ jne again
+ };
+ }
+}
+
+template<typename T>
+inline T
+platform_load64(const volatile T * p) BOOST_NOEXCEPT
+{
+ T value;
+
+ if (((uint32_t)p & 0x00000007) == 0)
+ {
+#if defined(_M_IX86_FP) && _M_IX86_FP >= 2
+ __asm
+ {
+ mov edx, p
+ movq xmm4, qword ptr [edx]
+ movq value, xmm4
+ };
+#else
+ __asm
+ {
+ mov edx, p
+ fild qword ptr [edx]
+ fistp value
+ };
+#endif
+ }
+ else
+ {
+ // We don't care about the comparison result here; the previous value will be stored into value anyway.
+ // Also we don't care about the ebx and ecx values; they just have to be equal to eax and edx before cmpxchg8b.
+ __asm
+ {
+ mov edi, p
+ mov eax, ebx
+ mov edx, ecx
+ lock cmpxchg8b qword ptr [edi]
+ mov dword ptr [value], eax
+ mov dword ptr [value + 4], edx
+ };
+ }
+
+ return value;
+}
+
+#endif
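
As an aside on the alignment assumption described in the comment above (this sketch is not part of the patch): an 8-byte-aligned quadword can be loaded or stored with a single instruction, while a misaligned address must take the cmpxchg8b path. A minimal form of that alignment test, in portable C++:

#include <cstdint>

// Sketch only: true when the 64-bit object at p sits on an 8-byte boundary
// and thus falls under the Intel SDM atomicity guarantee quoted above.
inline bool is_quadword_aligned(const volatile void* p)
{
    return (reinterpret_cast< std::uintptr_t >(p) & 7u) == 0u;
}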
} // namespace detail
} // namespace atomics
} // namespace boost
+/* pull in 64-bit atomic type using cmpxchg8b above */
+#if defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B)
+#include <boost/atomic/detail/cas64strong.hpp>
+#endif
+
#endif /* !defined(BOOST_ATOMIC_FORCE_FALLBACK) */
#ifdef _MSC_VER
Modified: branches/release/libs/atomic/doc/atomic.qbk
==============================================================================
--- branches/release/libs/atomic/doc/atomic.qbk Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/libs/atomic/doc/atomic.qbk 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -577,10 +577,30 @@
[table
[[Macro] [Description]]
[
+ [`BOOST_ATOMIC_FLAG_LOCK_FREE`]
+ [Indicate whether `atomic_flag` is lock-free]
+ ]
+ [
+ [`BOOST_ATOMIC_BOOL_LOCK_FREE`]
+ [Indicate whether `atomic<bool>` is lock-free]
+ ]
+ [
[`BOOST_ATOMIC_CHAR_LOCK_FREE`]
[Indicate whether `atomic<char>` (including signed/unsigned variants) is lock-free]
]
[
+ [`BOOST_ATOMIC_CHAR16_T_LOCK_FREE`]
+ [Indicate whether `atomic<char16_t>` (including signed/unsigned variants) is lock-free]
+ ]
+ [
+ [`BOOST_ATOMIC_CHAR32_T_LOCK_FREE`]
+ [Indicate whether `atomic<char32_t>` (including signed/unsigned variants) is lock-free]
+ ]
+ [
+ [`BOOST_ATOMIC_WCHAR_T_LOCK_FREE`]
+ [Indicate whether `atomic<wchar_t>` (including signed/unsigned variants) is lock-free]
+ ]
+ [
[`BOOST_ATOMIC_SHORT_LOCK_FREE`]
[Indicate whether `atomic<short>` (including signed/unsigned variants) is lock-free]
]
@@ -597,9 +617,21 @@
[Indicate whether `atomic<long long>` (including signed/unsigned variants) is lock-free]
]
[
- [`BOOST_ATOMIC_ADDRESS_LOCK_FREE`]
+ [`BOOST_ATOMIC_INT128_LOCK_FREE`]
+ [Indicate whether `atomic<int128_type>` (including signed/unsigned variants) is lock-free. This macro is a non-standard extension.]
+ ]
+ [
+ [`BOOST_ATOMIC_ADDRESS_LOCK_FREE` or `BOOST_ATOMIC_POINTER_LOCK_FREE`]
[Indicate whether `atomic<T *>` is lock-free]
]
+ [
+ [`BOOST_ATOMIC_THREAD_FENCE`]
+ [Indicate whether `atomic_thread_fence` function is lock-free]
+ ]
+ [
+ [`BOOST_ATOMIC_SIGNAL_FENCE`]
+ [Indicate whether `atomic_signal_fence` function is lock-free]
+ ]
]
[endsect]
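
As a usage illustration for the macros listed above (not part of the documentation itself, and assuming the usual convention that a value of 2 means "always lock-free"), a build can be made to fail early when a required specialization would fall back to the lock pool:

#include <boost/atomic.hpp>
#include <boost/static_assert.hpp>

// Refuse to compile if atomic<int> may not be lock-free on this target.
BOOST_STATIC_ASSERT(BOOST_ATOMIC_INT_LOCK_FREE == 2);

boost::atomic< int > g_event_counter(0);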
@@ -648,10 +680,10 @@
memory operations only in one direction. Since there is no
way to express this constraint to the compiler, these act
as "full compiler barriers" in this implementation. In corner
- cases this may lead to worse code than a C++11 compiler
+ cases this may result in less efficient code than a C++11 compiler
could generate.
* [*No interprocess fallback]: using `atomic<T>` in shared memory only works
- correctly, if `atomic<T>::is_lock_free == true`
+ correctly, if `atomic<T>::is_lock_free() == true`
[endsect]
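
The interprocess limitation noted above can also be verified at run time. A minimal sketch, assuming the atomic object has already been constructed inside a shared-memory segment:

#include <boost/atomic.hpp>
#include <stdexcept>

// Sketch only: refuse to use an atomic placed in shared memory unless the
// specialization is actually lock-free.
void check_shared_memory_atomic(boost::atomic< int >& a)
{
    if (!a.is_lock_free())
        throw std::runtime_error("atomic<int> is not lock-free; unsafe across processes");
}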
@@ -667,7 +699,7 @@
* [*native_api.cpp] verifies that all atomic operations have correct
value semantics (e.g. "fetch_add" really adds the desired value,
returning the previous). It is a rough "smoke-test" to help weed
- out the most obvious mistakes (for example with overflow,
+ out the most obvious mistakes (for example width overflow,
signed/unsigned extension, ...).
* [*lockfree.cpp] verifies that the [*BOOST_ATOMIC_*_LOCKFREE] macros
are set properly according to the expectations for a given
Modified: branches/release/libs/atomic/src/lockpool.cpp
==============================================================================
--- branches/release/libs/atomic/src/lockpool.cpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/libs/atomic/src/lockpool.cpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -1,3 +1,5 @@
+#include <cstddef>
+#include <boost/config.hpp>
#include <boost/atomic.hpp>
// Copyright (c) 2011 Helge Bahmann
@@ -10,13 +12,42 @@
namespace atomics {
namespace detail {
-static lockpool::lock_type lock_pool_[41];
+namespace {
+
+// This seems to be the maximum across all modern CPUs
+// NOTE: This constant is defined as a macro because some compilers (gcc 4.4, for one) don't allow enums or regular constants in alignment attributes
+#define BOOST_ATOMIC_CACHE_LINE_SIZE 64
+
+template< unsigned int N >
+struct padding
+{
+ char data[N];
+};
+template< >
+struct padding< 0 >
+{
+};
+
+struct BOOST_ALIGNMENT(BOOST_ATOMIC_CACHE_LINE_SIZE) padded_lock
+{
+ lockpool::lock_type lock;
+ // The additional padding is needed to avoid false sharing between locks
+ enum { padding_size = (sizeof(lockpool::lock_type) <= BOOST_ATOMIC_CACHE_LINE_SIZE ?
+ (BOOST_ATOMIC_CACHE_LINE_SIZE - sizeof(lockpool::lock_type)) :
+ (BOOST_ATOMIC_CACHE_LINE_SIZE - sizeof(lockpool::lock_type) % BOOST_ATOMIC_CACHE_LINE_SIZE)) };
+ padding< padding_size > pad;
+};
+
+static padded_lock lock_pool_[41];
+
+} // namespace
+
// NOTE: This function must NOT be inline. Otherwise MSVC 9 will sometimes generate broken code for the modulus operation, which results in crashes.
BOOST_ATOMIC_DECL lockpool::lock_type& lockpool::get_lock_for(const volatile void* addr)
{
std::size_t index = reinterpret_cast<std::size_t>(addr) % (sizeof(lock_pool_) / sizeof(*lock_pool_));
- return lock_pool_[index];
+ return lock_pool_[index].lock;
}
}
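
The padded lock pool introduced above can be pictured with a small stand-alone C++11 sketch (not part of the patch; std::mutex stands in for lockpool::lock_type and 64 bytes is the assumed cache-line size): each slot is padded out to a full cache line so neighbouring locks do not false-share, and an address picks a slot by taking its value modulo the pool size.

#include <cstddef>
#include <cstdint>
#include <mutex>

struct alignas(64) padded_mutex
{
    std::mutex lock;   // alignas pads sizeof(padded_mutex) up to a 64-byte multiple
};

static padded_mutex demo_pool[41];

inline std::mutex& demo_lock_for(const volatile void* addr)
{
    // Same scheme as lockpool::get_lock_for above: address modulo pool size.
    std::size_t index = reinterpret_cast< std::uintptr_t >(addr) % (sizeof(demo_pool) / sizeof(*demo_pool));
    return demo_pool[index].lock;
}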
Modified: branches/release/libs/atomic/test/api_test_helpers.hpp
==============================================================================
--- branches/release/libs/atomic/test/api_test_helpers.hpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/libs/atomic/test/api_test_helpers.hpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -265,8 +265,8 @@
test_additive_wrap<T>(0);
test_additive_wrap<T>((T) -1);
- test_additive_wrap<T>(-1LL << (sizeof(T) * 8 - 1));
- test_additive_wrap<T>(~ (-1LL << (sizeof(T) * 8 - 1)));
+ test_additive_wrap<T>(((T)-1) << (sizeof(T) * 8 - 1));
+ test_additive_wrap<T>(~ (((T)-1) << (sizeof(T) * 8 - 1)));
}
template<typename T>
Modified: branches/release/libs/atomic/test/lockfree.cpp
==============================================================================
--- branches/release/libs/atomic/test/lockfree.cpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/libs/atomic/test/lockfree.cpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -11,6 +11,7 @@
#include <iostream>
+#include <boost/config.hpp>
#include <boost/atomic.hpp>
#include <boost/test/minimal.hpp>
@@ -48,6 +49,7 @@
#else
#define EXPECT_LLONG_LOCK_FREE 0
#endif
+#define EXPECT_INT128_LOCK_FREE 0
#define EXPECT_POINTER_LOCK_FREE 2
#define EXPECT_BOOL_LOCK_FREE 2
@@ -58,6 +60,11 @@
#define EXPECT_INT_LOCK_FREE 2
#define EXPECT_LONG_LOCK_FREE 2
#define EXPECT_LLONG_LOCK_FREE 2
+#if defined(BOOST_ATOMIC_X86_HAS_CMPXCHG16B) && defined(BOOST_HAS_INT128)
+#define EXPECT_INT128_LOCK_FREE 2
+#else
+#define EXPECT_INT128_LOCK_FREE 0
+#endif
#define EXPECT_POINTER_LOCK_FREE 2
#define EXPECT_BOOL_LOCK_FREE 2
@@ -75,6 +82,7 @@
#else
#define EXPECT_LLONG_LOCK_FREE 0
#endif
+#define EXPECT_INT128_LOCK_FREE 0
#define EXPECT_POINTER_LOCK_FREE 2
#define EXPECT_BOOL_LOCK_FREE 2
@@ -88,6 +96,7 @@
#define EXPECT_INT_LOCK_FREE 2
#define EXPECT_LONG_LOCK_FREE 2
#define EXPECT_LLONG_LOCK_FREE 2
+#define EXPECT_INT128_LOCK_FREE 0
#define EXPECT_POINTER_LOCK_FREE 2
#define EXPECT_BOOL_LOCK_FREE 2
@@ -100,6 +109,7 @@
#define EXPECT_INT_LOCK_FREE 2
#define EXPECT_LONG_LOCK_FREE 2
#define EXPECT_LLONG_LOCK_FREE 0
+#define EXPECT_INT128_LOCK_FREE 0
#define EXPECT_POINTER_LOCK_FREE 2
#define EXPECT_BOOL_LOCK_FREE 2
@@ -110,6 +120,7 @@
#define EXPECT_INT_LOCK_FREE 2
#define EXPECT_LONG_LOCK_FREE 2
#define EXPECT_LLONG_LOCK_FREE 0
+#define EXPECT_INT128_LOCK_FREE 0
#define EXPECT_POINTER_LOCK_FREE 2
#define EXPECT_BOOL_LOCK_FREE 2
@@ -120,6 +131,7 @@
#define EXPECT_INT_LOCK_FREE 2
#define EXPECT_LONG_LOCK_FREE 2
#define EXPECT_LLONG_LOCK_FREE 0
+#define EXPECT_INT128_LOCK_FREE 0
#define EXPECT_POINTER_LOCK_FREE 2
#define EXPECT_BOOL_LOCK_FREE 2
@@ -129,11 +141,12 @@
#define EXPECT_SHORT_LOCK_FREE 2
#define EXPECT_INT_LOCK_FREE 2
#define EXPECT_LONG_LOCK_FREE 2
-#if defined(_WIN64)
+#if defined(_WIN64) || defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B) || defined(_M_AMD64) || defined(_M_IA64)
#define EXPECT_LLONG_LOCK_FREE 2
#else
#define EXPECT_LLONG_LOCK_FREE 0
#endif
+#define EXPECT_INT128_LOCK_FREE 0
#define EXPECT_POINTER_LOCK_FREE 2
#define EXPECT_BOOL_LOCK_FREE 2
@@ -144,6 +157,7 @@
#define EXPECT_INT_LOCK_FREE 2
#define EXPECT_LONG_LOCK_FREE (sizeof(long) <= 4 ? 2 : 0)
#define EXPECT_LLONG_LOCK_FREE (sizeof(long long) <= 4 ? 2 : 0)
+#define EXPECT_INT128_LOCK_FREE 0
#define EXPECT_POINTER_LOCK_FREE (sizeof(void *) <= 4 ? 2 : 0)
#define EXPECT_BOOL_LOCK_FREE 2
@@ -154,6 +168,7 @@
#define EXPECT_INT_LOCK_FREE 0
#define EXPECT_LONG_LOCK_FREE 0
#define EXPECT_LLONG_LOCK_FREE 0
+#define EXPECT_INT128_LOCK_FREE 0
#define EXPECT_POINTER_LOCK_FREE 0
#define EXPECT_BOOL_LOCK_FREE 0
@@ -168,18 +183,21 @@
#ifdef BOOST_HAS_LONG_LONG
verify_lock_free<long long>("long long", BOOST_ATOMIC_LLONG_LOCK_FREE, EXPECT_LLONG_LOCK_FREE);
#endif
+#ifdef BOOST_HAS_INT128
+ verify_lock_free<boost::int128_type>("int128", BOOST_ATOMIC_INT128_LOCK_FREE, EXPECT_INT128_LOCK_FREE);
+#endif
verify_lock_free<void *>("void *", BOOST_ATOMIC_POINTER_LOCK_FREE, EXPECT_SHORT_LOCK_FREE);
verify_lock_free<bool>("bool", BOOST_ATOMIC_BOOL_LOCK_FREE, EXPECT_BOOL_LOCK_FREE);
bool any_lock_free =
- BOOST_ATOMIC_CHAR_LOCK_FREE ||
- BOOST_ATOMIC_SHORT_LOCK_FREE ||
- BOOST_ATOMIC_INT_LOCK_FREE ||
- BOOST_ATOMIC_LONG_LOCK_FREE ||
- BOOST_ATOMIC_LLONG_LOCK_FREE ||
- BOOST_ATOMIC_BOOL_LOCK_FREE;
+ BOOST_ATOMIC_CHAR_LOCK_FREE > 0 ||
+ BOOST_ATOMIC_SHORT_LOCK_FREE > 0 ||
+ BOOST_ATOMIC_INT_LOCK_FREE > 0 ||
+ BOOST_ATOMIC_LONG_LOCK_FREE > 0 ||
+ BOOST_ATOMIC_LLONG_LOCK_FREE > 0 ||
+ BOOST_ATOMIC_BOOL_LOCK_FREE > 0;
- BOOST_CHECK(!any_lock_free || BOOST_ATOMIC_THREAD_FENCE);
+ BOOST_CHECK(!any_lock_free || BOOST_ATOMIC_THREAD_FENCE > 0);
return 0;
}
Modified: branches/release/libs/atomic/test/native_api.cpp
==============================================================================
--- branches/release/libs/atomic/test/native_api.cpp Sat Jul 20 13:59:19 2013 (r85091)
+++ branches/release/libs/atomic/test/native_api.cpp 2013-07-20 14:01:35 EDT (Sat, 20 Jul 2013) (r85092)
@@ -4,6 +4,7 @@
// See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
+#include <boost/config.hpp>
#include <boost/atomic.hpp>
#include <boost/cstdint.hpp>
#include <boost/test/minimal.hpp>
@@ -33,6 +34,10 @@
test_integral_api<boost::int64_t>();
test_integral_api<long long>();
test_integral_api<unsigned long long>();
+#if defined(BOOST_HAS_INT128)
+ test_integral_api<boost::int128_type>();
+ test_integral_api<boost::uint128_type>();
+#endif
test_constexpr_ctor<char>();
test_constexpr_ctor<short>();
@@ -48,6 +53,9 @@
test_struct_api<test_struct<boost::uint16_t> >();
test_struct_api<test_struct<boost::uint32_t> >();
test_struct_api<test_struct<boost::uint64_t> >();
+#if defined(BOOST_HAS_INT128)
+ test_struct_api<test_struct<boost::uint128_type> >();
+#endif
test_large_struct_api();
Boost-Commit list run by bdawes at acm.org, david.abrahams at rcn.com, gregod at cs.rpi.edu, cpdaniel at pacbell.net, john at johnmaddock.co.uk