Diffstat (limited to '3rdParty/Boost/src/boost/atomic/detail/ops_msvc_arm.hpp')
-rw-r--r--  3rdParty/Boost/src/boost/atomic/detail/ops_msvc_arm.hpp  820
1 file changed, 820 insertions(+), 0 deletions(-)
diff --git a/3rdParty/Boost/src/boost/atomic/detail/ops_msvc_arm.hpp b/3rdParty/Boost/src/boost/atomic/detail/ops_msvc_arm.hpp
new file mode 100644
index 0000000..349f7a5
--- /dev/null
+++ b/3rdParty/Boost/src/boost/atomic/detail/ops_msvc_arm.hpp
@@ -0,0 +1,820 @@
+/*
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ *
+ * Copyright (c) 2009 Helge Bahmann
+ * Copyright (c) 2012 Tim Blechmann
+ * Copyright (c) 2014 Andrey Semashev
+ */
+/*!
+ * \file atomic/detail/ops_msvc_arm.hpp
+ *
+ * This header contains an implementation of the \c operations template.
+ */
+
+#ifndef BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_
+#define BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_
+
+#include <intrin.h>
+#include <boost/memory_order.hpp>
+#include <boost/type_traits/make_signed.hpp>
+#include <boost/atomic/detail/config.hpp>
+#include <boost/atomic/detail/interlocked.hpp>
+#include <boost/atomic/detail/storage_type.hpp>
+#include <boost/atomic/detail/operations_fwd.hpp>
+#include <boost/atomic/capabilities.hpp>
+#include <boost/atomic/detail/ops_msvc_common.hpp>
+
+#ifdef BOOST_HAS_PRAGMA_ONCE
+#pragma once
+#endif
+
+#define BOOST_ATOMIC_DETAIL_ARM_LOAD8(p) __iso_volatile_load8((const volatile __int8*)(p))
+#define BOOST_ATOMIC_DETAIL_ARM_LOAD16(p) __iso_volatile_load16((const volatile __int16*)(p))
+#define BOOST_ATOMIC_DETAIL_ARM_LOAD32(p) __iso_volatile_load32((const volatile __int32*)(p))
+#define BOOST_ATOMIC_DETAIL_ARM_LOAD64(p) __iso_volatile_load64((const volatile __int64*)(p))
+#define BOOST_ATOMIC_DETAIL_ARM_STORE8(p, v) __iso_volatile_store8((volatile __int8*)(p), (__int8)(v))
+#define BOOST_ATOMIC_DETAIL_ARM_STORE16(p, v) __iso_volatile_store16((volatile __int16*)(p), (__int16)(v))
+#define BOOST_ATOMIC_DETAIL_ARM_STORE32(p, v) __iso_volatile_store32((volatile __int32*)(p), (__int32)(v))
+#define BOOST_ATOMIC_DETAIL_ARM_STORE64(p, v) __iso_volatile_store64((volatile __int64*)(p), (__int64)(v))
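+
+// Note (illustrative, not part of the original source): the __iso_volatile_*
+// intrinsics perform plain loads and stores with ISO C++ volatile semantics,
+// i.e. without the implicit barriers MSVC may otherwise emit for volatile
+// accesses under /volatile:ms. The fences required by the requested memory
+// order are instead issued explicitly by the fence_* helpers below.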
+
+namespace boost {
+namespace atomics {
+namespace detail {
+
+// A note about memory_order_consume. Technically, this architecture makes it
+// possible to omit the memory barrier after a consume load, since it supports
+// data dependency ordering. However, some compiler optimizations may break
+// seemingly valid code that relies on data dependency tracking by injecting
+// bogus branches to aid out-of-order execution. This can happen not only in
+// Boost.Atomic code but also in the user's code, which we do not control.
+// See this thread: http://lists.boost.org/Archives/boost/2014/06/213890.php.
+// For this reason we promote memory_order_consume to memory_order_acquire.
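+//
+// For example (illustrative, not part of the original source), on this
+// backend
+//
+//   boost::atomic< int > x;
+//   int n = x.load(boost::memory_order_consume);
+//
+// is compiled exactly as a memory_order_acquire load: the plain load is
+// followed by a full hardware fence in fence_after_load().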
+
+struct msvc_arm_operations_base
+{
+ static BOOST_FORCEINLINE void hardware_full_fence() BOOST_NOEXCEPT
+ {
+ __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
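+ // Illustrative note: 0xB is the binary encoding 0b1011 of the DMB "ISH"
+ // option - a full barrier over the inner shareable domain, i.e. all cores
+ // that may share the atomic object.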
+ }
+
+ static BOOST_FORCEINLINE void fence_before_store(memory_order order) BOOST_NOEXCEPT
+ {
+ BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
+
+ if ((order & memory_order_release) != 0)
+ hardware_full_fence();
+
+ BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
+ }
+
+ static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
+ {
+ BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
+
+ if (order == memory_order_seq_cst)
+ hardware_full_fence();
+
+ BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
+ }
+
+ static BOOST_FORCEINLINE void fence_after_load(memory_order order) BOOST_NOEXCEPT
+ {
+ BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
+
+ if ((order & (memory_order_consume | memory_order_acquire)) != 0)
+ hardware_full_fence();
+
+ BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
+ }
+
+ static BOOST_FORCEINLINE BOOST_CONSTEXPR memory_order cas_common_order(memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
+ {
+ // Combine the order flags and promote memory_order_consume to memory_order_acquire
+ return static_cast< memory_order >(((failure_order | success_order) & ~memory_order_consume) | (((failure_order | success_order) & memory_order_consume) << 1u));
+ }
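+
+ // Illustrative note (not part of the original source): this relies on
+ // Boost's flag-like memory_order encoding (relaxed = 0, consume = 1,
+ // acquire = 2, release = 4), where shifting the consume bit left by one
+ // turns it into the acquire bit. For example:
+ //   cas_common_order(memory_order_relaxed, memory_order_consume)
+ // evaluates to memory_order_acquire.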
+};
+
+template< typename T, typename Derived >
+struct msvc_arm_operations :
+ public msvc_arm_operations_base
+{
+ typedef T storage_type;
+
+ static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ typedef typename make_signed< storage_type >::type signed_storage_type;
+ return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
+ }
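+
+ // Illustrative note: fetch_sub is expressed as fetch_add of the two's
+ // complement negation; negating through the signed type avoids applying
+ // unary minus to an unsigned operand, which MSVC would diagnose as warning
+ // C4146. For example, subtracting 1 from an 8-bit storage adds 0xFF.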
+
+ static BOOST_FORCEINLINE bool compare_exchange_weak(
+ storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
+ {
+ return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
+ }
+
+ static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
+ {
+ return !!Derived::exchange(storage, (storage_type)1, order);
+ }
+
+ static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
+ {
+ Derived::store(storage, (storage_type)0, order);
+ }
+
+ static BOOST_FORCEINLINE bool is_lock_free(storage_type const volatile&) BOOST_NOEXCEPT
+ {
+ return true;
+ }
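+
+ // Note: every size specialization below maps each operation onto a native
+ // interlocked intrinsic or a plain load/store plus fences, so all supported
+ // sizes are genuinely lock-free.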
+};
+
+template< bool Signed >
+struct operations< 1u, Signed > :
+ public msvc_arm_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > >
+{
+ typedef msvc_arm_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > > base_type;
+ typedef typename base_type::storage_type storage_type;
+
+ static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ base_type::fence_before_store(order);
+ BOOST_ATOMIC_DETAIL_ARM_STORE8(&storage, v);
+ base_type::fence_after_store(order);
+ }
+
+ static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
+ {
+ storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD8(&storage);
+ base_type::fence_after_load(order);
+ return v;
+ }
+
+ static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&storage, v));
+ break;
+ }
+ return v;
+ }
+
+ static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&storage, v));
+ break;
+ }
+ return v;
+ }
+
+ static BOOST_FORCEINLINE bool compare_exchange_strong(
+ storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
+ {
+ storage_type previous = expected, old_val;
+
+ switch (cas_common_order(success_order, failure_order))
+ {
+ case memory_order_relaxed:
+ old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELAXED(&storage, desired, previous));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_ACQUIRE(&storage, desired, previous));
+ break;
+ case memory_order_release:
+ old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELEASE(&storage, desired, previous));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&storage, desired, previous));
+ break;
+ }
+ expected = old_val;
+
+ return (previous == old_val);
+ }
+
+ static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&storage, v));
+ break;
+ }
+ return v;
+ }
+
+ static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&storage, v));
+ break;
+ }
+ return v;
+ }
+
+ static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&storage, v));
+ break;
+ }
+ return v;
+ }
+};
+
+template< bool Signed >
+struct operations< 2u, Signed > :
+ public msvc_arm_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > >
+{
+ typedef msvc_arm_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > > base_type;
+ typedef typename base_type::storage_type storage_type;
+
+ static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ base_type::fence_before_store(order);
+ BOOST_ATOMIC_DETAIL_ARM_STORE16(&storage, v);
+ base_type::fence_after_store(order);
+ }
+
+ static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
+ {
+ storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD16(&storage);
+ base_type::fence_after_load(order);
+ return v;
+ }
+
+ static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&storage, v));
+ break;
+ }
+ return v;
+ }
+
+ static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&storage, v));
+ break;
+ }
+ return v;
+ }
+
+ static BOOST_FORCEINLINE bool compare_exchange_strong(
+ storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
+ {
+ storage_type previous = expected, old_val;
+
+ switch (cas_common_order(success_order, failure_order))
+ {
+ case memory_order_relaxed:
+ old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELAXED(&storage, desired, previous));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_ACQUIRE(&storage, desired, previous));
+ break;
+ case memory_order_release:
+ old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELEASE(&storage, desired, previous));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&storage, desired, previous));
+ break;
+ }
+ expected = old_val;
+
+ return (previous == old_val);
+ }
+
+ static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&storage, v));
+ break;
+ }
+ return v;
+ }
+
+ static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&storage, v));
+ break;
+ }
+ return v;
+ }
+
+ static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&storage, v));
+ break;
+ }
+ return v;
+ }
+};
+
+template< bool Signed >
+struct operations< 4u, Signed > :
+ public msvc_arm_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > >
+{
+ typedef msvc_arm_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > > base_type;
+ typedef typename base_type::storage_type storage_type;
+
+ static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ base_type::fence_before_store(order);
+ BOOST_ATOMIC_DETAIL_ARM_STORE32(&storage, v);
+ base_type::fence_after_store(order);
+ }
+
+ static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
+ {
+ storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD32(&storage);
+ base_type::fence_after_load(order);
+ return v;
+ }
+
+ static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v));
+ break;
+ }
+ return v;
+ }
+
+ static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
+ break;
+ }
+ return v;
+ }
+
+ static BOOST_FORCEINLINE bool compare_exchange_strong(
+ storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
+ {
+ storage_type previous = expected, old_val;
+
+ switch (cas_common_order(success_order, failure_order))
+ {
+ case memory_order_relaxed:
+ old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELAXED(&storage, desired, previous));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_ACQUIRE(&storage, desired, previous));
+ break;
+ case memory_order_release:
+ old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELEASE(&storage, desired, previous));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
+ break;
+ }
+ expected = old_val;
+
+ return (previous == old_val);
+ }
+
+ static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
+ break;
+ }
+ return v;
+ }
+
+ static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
+ break;
+ }
+ return v;
+ }
+
+ static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
+ break;
+ }
+ return v;
+ }
+};
+
+template< bool Signed >
+struct operations< 8u, Signed > :
+ public msvc_arm_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > >
+{
+ typedef msvc_arm_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > > base_type;
+ typedef typename base_type::storage_type storage_type;
+
+ static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ base_type::fence_before_store(order);
+ BOOST_ATOMIC_DETAIL_ARM_STORE64(&storage, v);
+ base_type::fence_after_store(order);
+ }
+
+ static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
+ {
+ storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD64(&storage);
+ base_type::fence_after_load(order);
+ return v;
+ }
+
+ static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&storage, v));
+ break;
+ }
+ return v;
+ }
+
+ static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&storage, v));
+ break;
+ }
+ return v;
+ }
+
+ static BOOST_FORCEINLINE bool compare_exchange_strong(
+ storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
+ {
+ storage_type previous = expected, old_val;
+
+ switch (cas_common_order(success_order, failure_order))
+ {
+ case memory_order_relaxed:
+ old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELAXED(&storage, desired, previous));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_ACQUIRE(&storage, desired, previous));
+ break;
+ case memory_order_release:
+ old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELEASE(&storage, desired, previous));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&storage, desired, previous));
+ break;
+ }
+ expected = old_val;
+
+ return (previous == old_val);
+ }
+
+ static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&storage, v));
+ break;
+ }
+ return v;
+ }
+
+ static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&storage, v));
+ break;
+ }
+ return v;
+ }
+
+ static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
+ {
+ switch (order)
+ {
+ case memory_order_relaxed:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELAXED(&storage, v));
+ break;
+ case memory_order_consume:
+ case memory_order_acquire:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_ACQUIRE(&storage, v));
+ break;
+ case memory_order_release:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELEASE(&storage, v));
+ break;
+ case memory_order_acq_rel:
+ case memory_order_seq_cst:
+ default:
+ v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&storage, v));
+ break;
+ }
+ return v;
+ }
+};
+
+
+BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
+{
+ BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
+ if (order != memory_order_relaxed)
+ msvc_arm_operations_base::hardware_full_fence();
+ BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
+}
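+
+// Illustrative note: ARMv7 has no one-directional (acquire- or release-only)
+// barrier instruction, so every non-relaxed thread fence lowers to the same
+// full DMB ISH; e.g. thread_fence(memory_order_acquire) and
+// thread_fence(memory_order_seq_cst) emit identical code here.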
+
+BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
+{
+ if (order != memory_order_relaxed)
+ BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
+}
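+
+// Illustrative note: a signal handler executes on the same core as the thread
+// it interrupts, so a signal fence only needs to prevent compiler reordering;
+// no hardware barrier is emitted.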
+
+} // namespace detail
+} // namespace atomics
+} // namespace boost
+
+#undef BOOST_ATOMIC_DETAIL_ARM_LOAD8
+#undef BOOST_ATOMIC_DETAIL_ARM_LOAD16
+#undef BOOST_ATOMIC_DETAIL_ARM_LOAD32
+#undef BOOST_ATOMIC_DETAIL_ARM_LOAD64
+#undef BOOST_ATOMIC_DETAIL_ARM_STORE8
+#undef BOOST_ATOMIC_DETAIL_ARM_STORE16
+#undef BOOST_ATOMIC_DETAIL_ARM_STORE32
+#undef BOOST_ATOMIC_DETAIL_ARM_STORE64
+
+#endif // BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_