#ifndef BOOST_DETAIL_RW_MUTEX_HPP_INCLUDED
#define BOOST_DETAIL_RW_MUTEX_HPP_INCLUDED

// MS compatible compilers support #pragma once

#if defined(_MSC_VER) && (_MSC_VER >= 1020)
# pragma once
#endif

// Copyright (c) 2005, 2006 Peter Dimov
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#define RW_MUTEX_NAME "Terekhov/Hinnant, lock-free"

#include <boost/detail/interlocked.hpp> // BOOST_INTERLOCKED_* primitives
#include <boost/thread/mutex.hpp>       // boost::mutex
#include <boost/thread/condition.hpp>   // boost::condition

// Relaxed atomic load; no ordering guarantees.
template<class T> inline T atomic_load_none( T const * pt )
{
    return *static_cast<T const volatile *>( pt );
}

// Atomic store with release semantics.
template<class T> inline void atomic_store_rel( T * pt, T v )
{
    // x86-specific implementation: an ordinary store has release semantics
    *static_cast<T volatile *>( pt ) = v;
}

// Compare-and-swap wrapper; on failure, *comparand is updated with the
// value actually observed in *destination.
inline bool atomic_compare_exchange( long * destination, long * comparand, long exchange )
{
    long ov = *comparand;
    long nv = BOOST_INTERLOCKED_COMPARE_EXCHANGE( destination, exchange, ov );

    if( ov == nv )
    {
        return true;
    }
    else
    {
        *comparand = nv;
        return false;
    }
}

class rw_mutex
{
private:

    rw_mutex( rw_mutex const & );
    rw_mutex & operator=( rw_mutex const & );

private:

    boost::mutex mx_;
    boost::condition cn_r_; // signaled when the last reader leaves
    boost::condition cn_w_; // signaled when a writer releases the lock

    enum
    {
        state_wp_mask = 0x80000000ul, // writer pending/active
        state_cw_mask = 0x40000000ul, // someone waiting on cn_w_
        state_rd_mask = 0x3FFFFFFFul  // reader count
    };

    long state_;

public:

    rw_mutex(): state_( 0 )
    {
    }

    ~rw_mutex()
    {
    }

    void rdlock()
    {
        long st = atomic_load_none( &state_ );

        for( ;; )
        {
            if( st & state_wp_mask )
            {
                // a writer is pending or active; record that we are waiting
                // on cn_w_ and block until the writer releases the lock
                boost::mutex::scoped_lock lock( mx_ );

                if( atomic_compare_exchange( &state_, &st, st | state_cw_mask ) )
                {
                    cn_w_.wait( lock );
                    st = state_;
                }
            }
            else
            {
                // no writer; try to bump the reader count on the fast path
                if( atomic_compare_exchange( &state_, &st, st + 1 ) ) return; // got read lock
            }
        }
    }

    void lock()
    {
        long st = atomic_load_none( &state_ );

        for( ;; )
        {
            if( st & state_wp_mask )
            {
                // another writer holds or is acquiring the lock; wait on cn_w_
                boost::mutex::scoped_lock lock( mx_ );

                if( atomic_compare_exchange( &state_, &st, st | state_cw_mask ) )
                {
                    cn_w_.wait( lock );
                    st = state_;
                }
            }
            else
            {
                // claim the writer bit, then wait for active readers to drain
                if( atomic_compare_exchange( &state_, &st, st | state_wp_mask ) )
                {
                    if( st & state_rd_mask )
                    {
                        boost::mutex::scoped_lock lock( mx_ );

                        while( state_ & state_rd_mask )
                        {
                            cn_r_.wait( lock );
                        }
                    }

                    return;
                }
            }
        }
    }

    void rdunlock()
    {
        long st = BOOST_INTERLOCKED_EXCHANGE_ADD( &state_, -1 );

        if( ( st & ~state_cw_mask ) == ( state_wp_mask | 1 ) ) // we were the last reader and a writer is waiting
        {
            {
                boost::mutex::scoped_lock lock( mx_ );
            }

            cn_r_.notify_one();
        }
    }

    void unlock()
    {
        long st = BOOST_INTERLOCKED_EXCHANGE( &state_, 0 );

        if( st & state_cw_mask )
        {
            {
                boost::mutex::scoped_lock lock( mx_ );
            }

            cn_w_.notify_all();
        }
    }

public:

    //long retries( int ix )
    //{
    //    return InterlockedExchange( &retries_[ ix ], 0 );
    //}

public: // lock classes

    class scoped_read_lock
    {
    private:

        rw_mutex & mx_;

        scoped_read_lock( scoped_read_lock const & );
        scoped_read_lock & operator=( scoped_read_lock const & );

    public:

        scoped_read_lock( rw_mutex & mx ): mx_( mx )
        {
            mx_.rdlock();
        }

        ~scoped_read_lock()
        {
            mx_.rdunlock();
        }
    };

    class scoped_write_lock
    {
    private:

        rw_mutex & mx_;

        scoped_write_lock( scoped_write_lock const & );
        scoped_write_lock & operator=( scoped_write_lock const & );

    public:

        scoped_write_lock( rw_mutex & mx ): mx_( mx )
        {
            mx_.lock();
        }

        ~scoped_write_lock()
        {
            mx_.unlock();
        }
    };
};

#endif // #ifndef BOOST_DETAIL_RW_MUTEX_HPP_INCLUDED
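
// ------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of the header): readers take a
// scoped_read_lock, writers take a scoped_write_lock, and both release in
// their destructors. The names `mtx`, `shared_value`, `read_value` and
// `set_value` below are hypothetical.
//
//   rw_mutex mtx;
//   int shared_value = 0;
//
//   int read_value()
//   {
//       rw_mutex::scoped_read_lock lock( mtx );  // shared access
//       return shared_value;
//   }
//
//   void set_value( int v )
//   {
//       rw_mutex::scoped_write_lock lock( mtx ); // exclusive access
//       shared_value = v;
//   }
// ------------------------------------------------------------------------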