mbed-os5 only for TYBLE16

Dependents:   TYBLE16_simple_data_logger TYBLE16_MP3_Air

Committer: kenjiArai
Date: Tue Dec 31 06:02:27 2019 +0000
Revision: 1:9db0e321a9f4 - updated based on mbed-os5.15.0

/*
 * Copyright (c) 2017 ARM Limited
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MSTD_ATOMIC_
#define MSTD_ATOMIC_

#include <stddef.h>
#include <stdint.h>
#include <mstd_memory>
#include <mstd_type_traits>
#include <mstd_utility>
#include "platform/mbed_assert.h"
#include "platform/mbed_atomic.h"
#include "platform/mbed_critical.h"
#include "platform/CriticalSectionLock.h"

/*
 * mstd::atomic template and types are designed to be as close as possible to C++11
 * std::atomic. Key differences:
 *
 * - Operations are specified as atomic with respect to interrupts as well as
 *   threads
 * - "Lock-free" indicates that exclusive accesses are used rather than a
 *   critical section.
 * - Default initialization follows C17 and proposed C++2x rules - ie that
 *   like normal objects they are zero-initialized if static or thread-local,
 *   else in an indeterminate state when automatic. There is no ATOMIC_VAR_INIT()
 *   equivalent.
 */
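
/* As a minimal illustration of those default-initialization rules (the names
 * below are illustrative only):
 *
 *     mstd::atomic<int> counter;              // static storage - zero-initialized
 *
 *     void f()
 *     {
 *         mstd::atomic<int> local;            // automatic - value is indeterminate
 *         mstd::atomic_init(&local, 0);       // one-off, non-atomic initialization
 *         mstd::atomic<int> ready(1);         // or use value initialization instead
 *     }
 */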

#ifndef MBED_EXCLUSIVE_ACCESS
#define MSTD_ATOMIC_BOOL_LOCK_FREE 0
#define MSTD_ATOMIC_CHAR_LOCK_FREE 0
#define MSTD_ATOMIC_CHAR16_T_LOCK_FREE 0
#define MSTD_ATOMIC_CHAR32_T_LOCK_FREE 0
#define MSTD_ATOMIC_WCHAR_T_LOCK_FREE 0
#define MSTD_ATOMIC_SHORT_LOCK_FREE 0
#define MSTD_ATOMIC_INT_LOCK_FREE 0
#define MSTD_ATOMIC_LONG_LOCK_FREE 0
#define MSTD_ATOMIC_LLONG_LOCK_FREE 0
#define MSTD_ATOMIC_POINTER_LOCK_FREE 0
#else
#define MSTD_ATOMIC_BOOL_LOCK_FREE 2
#define MSTD_ATOMIC_CHAR_LOCK_FREE 2
#define MSTD_ATOMIC_CHAR16_T_LOCK_FREE 2
#define MSTD_ATOMIC_CHAR32_T_LOCK_FREE 2
#define MSTD_ATOMIC_WCHAR_T_LOCK_FREE 2
#define MSTD_ATOMIC_SHORT_LOCK_FREE 2
#define MSTD_ATOMIC_INT_LOCK_FREE 2
#define MSTD_ATOMIC_LONG_LOCK_FREE 2
#define MSTD_ATOMIC_LLONG_LOCK_FREE 0
#define MSTD_ATOMIC_POINTER_LOCK_FREE 2
#endif

namespace mstd {

/** Atomic template
 *
 * `mstd::atomic<T>` is intended to work as per C++14 `std::atomic<T>`. `T` must be a
 * _TriviallyCopyable_, _CopyConstructible_ and _CopyAssignable_ type.
 * - All standard methods of `std::atomic` are supplied:
 *   + For any `T`: `load`, `store`, `exchange`, `compare_exchange_weak`,
 *     `compare_exchange_strong`, `operator T`, `operator=(T)`, `Atomic(T)`;
 *   + For integers and pointers: `++`, `+=`, `--`, `-=`, `fetch_add`, `fetch_sub`;
 *   + For integers: `&=`, `|=`, `^=`, `fetch_and`, `fetch_or`, `fetch_xor`.
 * - Operations are guaranteed atomic with respect to interrupts, and the
 *   operations can be used in interrupts - `std::atomic` implementations don't specify this.
 * - Implementation is optimized for uniprocessor use (no DMB instructions),
 *   unlike typical `std::atomic` implementations.
 * - Lock-free versions (LDREX/STREX) are used for user types if small enough and available,
 *   otherwise critical sections are used.
 * - If used with large objects, interrupt latency may be impacted.
 * - Valid initialisation forms are:
 *   + `atomic<int> foo;` (zero initialized if static or thread-local, else value indeterminate)
 *   + `atomic_init(&foo, 2);` (initialize a default-initialized variable, once only, not atomic)
 *   + `atomic<int> foo(2);` (value initialization)
 *   + `atomic<int> foo = { 2 };` (also legal C11 with _Atomic int)
 *   + `atomic<int> foo = 2;` (C++17 or later only - also legal C11 with _Atomic int)
 *   Note that the lack of a copy constructor limits the simple-looking assignment initialization
 *   to C++17 or later only.
 * - The value constructor is not available for small custom types.
 * - `MSTD_ATOMIC_XXX_LOCK_FREE` replaces `ATOMIC_XXX_LOCK_FREE` - "locking" forms
 *   take a critical section, non-locking do not.
 * - For `bool`, integer types and pointers, storage is compatible with the
 *   plain types. If necessary, they can be substituted as plain types for C
 *   compatibility in headers, and accessed using core_util_atomic functions.
 * @code
 * struct foo {
 * #ifdef __cplusplus
 *     mstd::atomic_uint32_t counter; // Use C++ templates from C++ code
 * #else
 *     uint32_t counter; // Could use core_util_atomic_xxx_u32 from C code, or just have this for structure layout.
 * #endif
 * };
 * @endcode
 */
template<typename T>
class atomic;
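
/* A minimal usage sketch of the template declared above - a counter shared
 * between an interrupt handler and thread code, relying on the interrupt
 * atomicity described in the notes (the handler and function names are
 * illustrative only):
 *
 *     mstd::atomic<uint32_t> event_count;      // static - zero-initialized
 *
 *     void my_irq_handler()                    // hypothetical interrupt handler
 *     {
 *         ++event_count;                       // atomic increment, usable in interrupt context
 *     }
 *
 *     uint32_t events_so_far()
 *     {
 *         return event_count.load();           // atomic read from thread context
 *     }
 */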

/* Pull C enum from mbed_critical.h into mstd namespace */
using memory_order = ::mbed_memory_order;
constexpr memory_order memory_order_relaxed = mbed_memory_order_relaxed;
constexpr memory_order memory_order_consume = mbed_memory_order_consume;
constexpr memory_order memory_order_acquire = mbed_memory_order_acquire;
constexpr memory_order memory_order_release = mbed_memory_order_release;
constexpr memory_order memory_order_acq_rel = mbed_memory_order_acq_rel;
constexpr memory_order memory_order_seq_cst = mbed_memory_order_seq_cst;

namespace impl {

/* For types up to uint64_t size, we use the mbed_critical.h functions with
 * uintX_t containers. Otherwise, we do it ourselves, with no special alignment.
 */
// *INDENT-OFF*
template<typename T>
using atomic_container = conditional<sizeof(T) <= sizeof(uint8_t), uint8_t,
                         conditional_t<sizeof(T) <= sizeof(uint16_t), uint16_t,
                         conditional_t<sizeof(T) <= sizeof(uint32_t), uint32_t,
                         conditional_t<sizeof(T) <= sizeof(uint64_t), uint64_t,
                         T
                         >>>>;
// *INDENT-ON*

template<typename T>
using atomic_container_t = typename atomic_container<T>::type;
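
/* Illustratively, the mapping above is expected to resolve as follows (the
 * pointer case assumes a 32-bit target):
 *
 *     static_assert(std::is_same<atomic_container_t<char>,     uint8_t>::value,  "");
 *     static_assert(std::is_same<atomic_container_t<int16_t>,  uint16_t>::value, "");
 *     static_assert(std::is_same<atomic_container_t<void *>,   uint32_t>::value, "");
 *     static_assert(std::is_same<atomic_container_t<uint64_t>, uint64_t>::value, "");
 */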

template<typename N>
struct atomic_container_is_lock_free;

template<>
struct atomic_container_is_lock_free<uint8_t> : bool_constant<bool(MSTD_ATOMIC_CHAR_LOCK_FREE)> { };

template<>
struct atomic_container_is_lock_free<uint16_t> : bool_constant<bool(MSTD_ATOMIC_SHORT_LOCK_FREE)> { };

template<>
struct atomic_container_is_lock_free<uint32_t> : bool_constant<bool(MSTD_ATOMIC_INT_LOCK_FREE)> { };

template<>
struct atomic_container_is_lock_free<uint64_t> : bool_constant<bool(MSTD_ATOMIC_LLONG_LOCK_FREE)> { };

template<typename T>
using atomic_is_lock_free = atomic_container_is_lock_free<atomic_container_t<T>>;

/* If one order is given for compare_exchange, it's reduced for failure case that doesn't store */
MBED_FORCEINLINE constexpr memory_order memorder_for_failure(memory_order order)
{
    return order == memory_order_acq_rel ? memory_order_acquire :
           order == memory_order_release ? memory_order_relaxed : order;
}
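
/* For instance, given the reduction above:
 *
 *     memorder_for_failure(memory_order_acq_rel) == memory_order_acquire
 *     memorder_for_failure(memory_order_release) == memory_order_relaxed
 *     memorder_for_failure(memory_order_acquire) == memory_order_acquire
 */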

/* Base template for a raw Atomic (arbitrary type T), using atomic storage size N.
 * This generic implementation uses critical sections and has no alignment requirements.
 * There are specialisations for smaller sizes below.
 */
template<typename T, typename = void>
struct AtomicBaseRaw {
    using value_type = T;
    AtomicBaseRaw() noexcept = default;
    constexpr AtomicBaseRaw(T v) noexcept : data(std::move(v))
    {
    }
    bool is_lock_free() const volatile noexcept
    {
        return false;
    }
    T load(memory_order order = memory_order_seq_cst) const volatile noexcept
    {
        MBED_CHECK_LOAD_ORDER(order);
        // Cope with T not having default constructor
        union {
            char c[sizeof(T)];
            T val;
        } ret;
        {
            CriticalSectionLock lock;
            memcpy(std::addressof(ret.val), const_cast<const T *>(std::addressof(data)), sizeof(T));
        }
        return std::move(ret.val);
    }
    T load(memory_order order = memory_order_seq_cst) const noexcept
    {
        MBED_CHECK_LOAD_ORDER(order);
        CriticalSectionLock lock;
        return data;
    }
    void store(T desired, memory_order order = memory_order_seq_cst) volatile noexcept
    {
        MBED_CHECK_STORE_ORDER(order);
        CriticalSectionLock lock;
        memcpy(const_cast<T *>(std::addressof(data)), std::addressof(desired), sizeof(T));
    }
    void store(T desired, memory_order order = memory_order_seq_cst) noexcept
    {
        MBED_CHECK_STORE_ORDER(order);
        CriticalSectionLock lock;
        data = std::move(desired); // MoveAssignable
    }
    T exchange(T desired, memory_order = memory_order_seq_cst) volatile noexcept
    {
        // Cope with T not having default constructor
        union {
            char c[sizeof(T)];
            T val;
        } old;
        {
            CriticalSectionLock lock;
            memcpy(std::addressof(old.val), const_cast<const T *>(std::addressof(data)), sizeof(T));
            memcpy(const_cast<T *>(std::addressof(data)), std::addressof(desired), sizeof(T));
        }
        return old.val;
    }
    T exchange(T desired, memory_order = memory_order_seq_cst) noexcept
    {
        CriticalSectionLock lock;
        T old = std::move(data); // MoveConstructible
        data = std::move(desired); // MoveAssignable
        return old;
    }
    bool compare_exchange_strong(T &expected, T desired, memory_order success, memory_order failure) volatile noexcept
    {
        MBED_CHECK_CAS_ORDER(success, failure);
        CriticalSectionLock lock;
        if (memcmp(const_cast<const T *>(std::addressof(data)), std::addressof(expected), sizeof(T)) == 0) {
            memcpy(const_cast<T *>(std::addressof(data)), std::addressof(desired), sizeof(T));
            return true;
        } else {
            memcpy(std::addressof(expected), const_cast<const T *>(std::addressof(data)), sizeof(T));
            return false;
        }
    }
    bool compare_exchange_strong(T &expected, T desired, memory_order success, memory_order failure) noexcept
    {
        MBED_CHECK_CAS_ORDER(success, failure);
        CriticalSectionLock lock;
        if (memcmp(std::addressof(data), std::addressof(expected), sizeof(T)) == 0) {
            data = std::move(desired); // MoveAssignable
            return true;
        } else {
            expected = data; // CopyAssignable
            return false;
        }
    }
    bool compare_exchange_weak(T &expected, T desired, memory_order success, memory_order failure) volatile noexcept
    {
        return compare_exchange_strong(expected, desired, success, failure);
    }
    bool compare_exchange_weak(T &expected, T desired, memory_order success, memory_order failure) noexcept
    {
        return compare_exchange_strong(expected, desired, success, failure);
    }
    bool compare_exchange_strong(T &expected, T desired, memory_order order = memory_order_seq_cst) volatile noexcept
    {
        return compare_exchange_strong(expected, desired, order, memorder_for_failure(order));
    }
    bool compare_exchange_strong(T &expected, T desired, memory_order order = memory_order_seq_cst) noexcept
    {
        return compare_exchange_strong(expected, desired, order, memorder_for_failure(order));
    }
    bool compare_exchange_weak(T &expected, T desired, memory_order order = memory_order_seq_cst) volatile noexcept
    {
        return compare_exchange_weak(expected, desired, order, memorder_for_failure(order));
    }
    bool compare_exchange_weak(T &expected, T desired, memory_order order = memory_order_seq_cst) noexcept
    {
        return compare_exchange_weak(expected, desired, order, memorder_for_failure(order));
    }
protected:
    union {
        // Having the union makes us just get zero-initialised, as per std::atomic, or our specializations,
        // rather than actually running T's default constructor.
        char dummy_for_zero_init;
        T data;
    };
    void init(T desired) volatile noexcept
    {
        memcpy(const_cast<T *>(std::addressof(data)), std::addressof(desired), sizeof(T));
    }
    void init(T desired) noexcept
    {
        data = std::move(desired);
    }
};

template<typename T, typename A, int N>
struct AtomicSmallStoragePadded {
    union {
        A u;
        struct {
            T data;
            char pad[N];
        };
    };
    AtomicSmallStoragePadded() noexcept = default;
    constexpr AtomicSmallStoragePadded(T v) noexcept : data(std::move(v)), pad{0}
    {
    }
    constexpr AtomicSmallStoragePadded(A v) noexcept : u(v)
    {
    }
};

template<typename T, typename A>
struct AtomicSmallStorageUnpadded {
    union {
        A u;
        T data;
    };
    AtomicSmallStorageUnpadded() noexcept = default;
    constexpr AtomicSmallStorageUnpadded(T v) noexcept : data(std::move(v))
    {
    }
    constexpr AtomicSmallStorageUnpadded(A v) noexcept : u(v)
    {
    }
};

// *INDENT-OFF*
template<typename T, typename Storage = atomic_container_t<T>>
using AtomicSmallStorage = std::conditional_t<sizeof(Storage) == sizeof(T),
                                              AtomicSmallStorageUnpadded<T, Storage>,
                                              AtomicSmallStoragePadded<T, Storage, sizeof(Storage) - sizeof(T)>>;
// *INDENT-ON*
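
/* For example, a hypothetical 3-byte trivially-copyable struct would be held
 * in a uint32_t container with one byte of zero padding, so that whole-word
 * compare-exchange always sees fully defined bits:
 *
 *     struct Rgb { uint8_t r, g, b; };    // sizeof(Rgb) == 3
 *     // AtomicSmallStorage<Rgb> is AtomicSmallStoragePadded<Rgb, uint32_t, 1>
 */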

/* Base implementation specialisation for arbitrary small type T of size N,
 * using corresponding atomic_xxx functions acting on uintX_t data type A of that size.
 * This does involve type punning on a union, so isn't strictly legal, but it's no worse than
 * what has always been done with the pointer atomics.
 * Always pad when necessary so that compare-exchange works.
 *
 * It's only worth using the specific small form if there is a real lock-free implementation.
 * Otherwise the overhead of shuffling in and out of the integer container is larger than just
 * doing it directly.
 */
template<typename T>
class AtomicBaseRaw<T, std::enable_if_t<atomic_is_lock_free<T>::value>> {
    AtomicSmallStorage<T> storage;
public:
    using value_type = T;
    AtomicBaseRaw() noexcept = default;
    constexpr AtomicBaseRaw(T v) : storage(std::move(v)) { }
    bool is_lock_free() const volatile noexcept
    {
        return atomic_is_lock_free<T>::value;
    }
    T load() const volatile noexcept
    {
        AtomicSmallStorage<T> loaded(core_util_atomic_load(&storage.u));
        return loaded.data;
    }
    T load(memory_order order) const volatile noexcept
    {
        AtomicSmallStorage<T> loaded(core_util_atomic_load_explicit(&storage.u, order));
        return loaded.data;
    }
    T load() const noexcept
    {
        AtomicSmallStorage<T> loaded(core_util_atomic_load(&storage.u));
        return loaded.data;
    }
    T load(memory_order order) const noexcept
    {
        AtomicSmallStorage<T> loaded(core_util_atomic_load_explicit(&storage.u, order));
        return loaded.data;
    }
    void store(T desired) volatile noexcept
    {
        AtomicSmallStorage<T> tostore(desired);
        core_util_atomic_store(&storage.u, tostore.u);
    }
    void store(T desired, memory_order order) volatile noexcept
    {
        AtomicSmallStorage<T> tostore(desired);
        core_util_atomic_store_explicit(&storage.u, tostore.u, order);
    }
    void store(T desired) noexcept
    {
        AtomicSmallStorage<T> tostore(desired);
        core_util_atomic_store(&storage.u, tostore.u);
    }
    void store(T desired, memory_order order) noexcept
    {
        AtomicSmallStorage<T> tostore(desired);
        core_util_atomic_store_explicit(&storage.u, tostore.u, order);
    }
    T exchange(T desired, memory_order = memory_order_seq_cst) volatile noexcept
    {
        AtomicSmallStorage<T> exchanged(desired);
        exchanged.u = core_util_atomic_exchange(&storage.u, exchanged.u);
        return exchanged.data;
    }
    bool compare_exchange_strong(T &expected, T desired) volatile noexcept
    {
        AtomicSmallStorage<T> expcur(expected);
        AtomicSmallStorage<T> tostore(desired);
        bool result = core_util_atomic_compare_exchange_strong(&storage.u, &expcur.u, tostore.u);
        if (!result) {
            expected = expcur.data;
        }
        return result;
    }
    bool compare_exchange_strong(T &expected, T desired, memory_order success, memory_order failure) volatile noexcept
    {
        AtomicSmallStorage<T> expcur(expected);
        AtomicSmallStorage<T> tostore(desired);
        bool result = core_util_atomic_compare_exchange_strong_explicit(&storage.u, &expcur.u, tostore.u, success, failure);
        if (!result) {
            expected = expcur.data;
        }
        return result;
    }
    bool compare_exchange_strong(T &expected, T desired, memory_order order) volatile noexcept
    {
        return compare_exchange_strong(expected, desired, order, memorder_for_failure(order));
    }
    bool compare_exchange_weak(T &expected, T desired) volatile noexcept
    {
        AtomicSmallStorage<T> expcur(expected);
        AtomicSmallStorage<T> tostore(desired);
        bool result = core_util_atomic_compare_exchange_weak(&storage.u, &expcur.u, tostore.u);
        if (!result) {
            expected = expcur.data;
        }
        return result;
    }
    bool compare_exchange_weak(T &expected, T desired, memory_order success, memory_order failure) volatile noexcept
    {
        AtomicSmallStorage<T> expcur(expected);
        AtomicSmallStorage<T> tostore(desired);
        bool result = core_util_atomic_compare_exchange_weak_explicit(&storage.u, &expcur.u, tostore.u, success, failure);
        if (!result) {
            expected = expcur.data;
        }
        return result;
    }
    bool compare_exchange_weak(T &expected, T desired, memory_order order) volatile noexcept
    {
        return compare_exchange_weak(expected, desired, order, memorder_for_failure(order));
    }
protected:
    void init(T desired) volatile noexcept
    {
        AtomicSmallStorage<T> tostore(desired);
        memcpy(const_cast<decltype(tostore.u) *>(&storage.u), &tostore.u, sizeof storage.u);
    }
    void init(T desired) noexcept
    {
        AtomicSmallStorage<T> tostore(desired);
        storage.u = std::move(tostore.u);
    }
};

/* Template for an integer or pointer Atomic of type T, using atomic storage A
 * Same functionality as AtomicBaseRaw, but can use simpler implementation using casts
 * to convert between type T and the underlying storage type. Doesn't change functionality,
 * compilers can generally optimise this better (will particularly help in debug build).
 * C casts must be used as reinterpret_cast can't handle integer<->integer, and static_cast
 * can't handle pointer<->integer. Note that
 * we always have A be unsigned, so that our arithmetic is unsigned and defined on overflow.
 * Compilers can and do treat signed arithmetic overflow as undefined, but not cast overflow.
 * (C++20 explicitly defines assignment of unsigned to signed as 2's-complement).
 * Our data field is of type T, not A, to permit value/aggregate initialisation.
 */
template<typename T, typename A = atomic_container_t<T>>
struct AtomicBaseInt {
    MBED_STRUCT_STATIC_ASSERT(sizeof(T) == sizeof(A), "AtomicBaseInt size mismatch");
    using value_type = T;
    AtomicBaseInt() noexcept = default;
    constexpr AtomicBaseInt(T v) noexcept : u(A(v))
    {
    }
    bool is_lock_free() const volatile noexcept
    {
        return atomic_container_is_lock_free<A>::value;
    }
    T load() const volatile noexcept
    {
        return T(core_util_atomic_load(&u));
    }
    T load(memory_order order) const volatile noexcept
    {
        return T(core_util_atomic_load_explicit(&u, order));
    }
    T load() const noexcept
    {
        return T(core_util_atomic_load(&u));
    }
    T load(memory_order order) const noexcept
    {
        return T(core_util_atomic_load_explicit(&u, order));
    }
    void store(T desired) volatile noexcept
    {
        core_util_atomic_store(&u, A(desired));
    }
    void store(T desired, memory_order order) volatile noexcept
    {
        core_util_atomic_store_explicit(&u, A(desired), order);
    }
    void store(T desired) noexcept
    {
        core_util_atomic_store(&u, A(desired));
    }
    void store(T desired, memory_order order) noexcept
    {
        core_util_atomic_store_explicit(&u, A(desired), order);
    }
    T exchange(T desired) volatile noexcept
    {
        A d = A(desired);
        return T(core_util_atomic_exchange(&u, d));
    }
    T exchange(T desired, memory_order order) volatile noexcept
    {
        A d = A(desired);
        return T(core_util_atomic_exchange_explicit(&u, d, order));
    }
    bool compare_exchange_strong(T &expected, T desired) volatile noexcept
    {
        A *expcur = reinterpret_cast<A *>(&expected);
        return core_util_atomic_compare_exchange_strong(&u, expcur, A(desired));
    }
    bool compare_exchange_strong(T &expected, T desired, memory_order success, memory_order failure) volatile noexcept
    {
        A *expcur = reinterpret_cast<A *>(&expected);
        return core_util_atomic_compare_exchange_strong_explicit(&u, expcur, A(desired), success, failure);
    }
    bool compare_exchange_strong(T &expected, T desired, memory_order order) volatile noexcept
    {
        return compare_exchange_strong(expected, desired, order, memorder_for_failure(order));
    }
    bool compare_exchange_weak(T &expected, T desired) volatile noexcept
    {
        A *expcur = reinterpret_cast<A *>(&expected);
        return core_util_atomic_compare_exchange_weak(&u, expcur, A(desired));
    }
    bool compare_exchange_weak(T &expected, T desired, memory_order success, memory_order failure) volatile noexcept
    {
        A *expcur = reinterpret_cast<A *>(&expected);
        return core_util_atomic_compare_exchange_weak_explicit(&u, expcur, A(desired), success, failure);
    }
    bool compare_exchange_weak(T &expected, T desired, memory_order order) volatile noexcept
    {
        return compare_exchange_weak(expected, desired, order, memorder_for_failure(order));
    }
protected:
    A u;
    void init(T desired) volatile noexcept
    {
        u = A(desired);
    }
    void init(T desired) noexcept
    {
        u = A(desired);
    }
};

/* Template for an integer or pointer Atomic, including increment and
 * decrement functionality. If StrideT is void, then the increment and
 * decrement operators are ill-formed, as desired for atomic<void *>.
 */
template<typename T, typename DiffT = T, typename StrideT = char, typename A = atomic_container_t<T>>
struct AtomicWithAdd : public AtomicBaseInt<T, A> {
    using difference_type = DiffT;
#ifdef __CC_ARM
    AtomicWithAdd() noexcept = default;
    constexpr AtomicWithAdd(T v) noexcept : AtomicBaseInt<T, A>(v)
    {
    }
#else
    using AtomicBaseInt<T, A>::AtomicBaseInt;
#endif
    T operator++() volatile noexcept
    {
        A d = static_cast<A>(sizeof(StrideT));
        return T(core_util_atomic_incr(&this->u, d));
    }
    T operator++(int) volatile noexcept
    {
        A d = static_cast<A>(sizeof(StrideT));
        return T(core_util_atomic_fetch_add(&this->u, d));
    }
    T operator--() volatile noexcept
    {
        A d = static_cast<A>(sizeof(StrideT));
        return T(core_util_atomic_decr(&this->u, d));
    }
    T operator--(int) volatile
    {
        A d = static_cast<A>(sizeof(StrideT));
        return T(core_util_atomic_fetch_sub(&this->u, d));
    }
    T fetch_add(DiffT arg) volatile
    {
        A d = static_cast<A>(arg * sizeof(StrideT));
        return T(core_util_atomic_fetch_add(&this->u, d));
    }
    T fetch_add(DiffT arg, memory_order order) volatile
    {
        A d = static_cast<A>(arg * sizeof(StrideT));
        return T(core_util_atomic_fetch_add_explicit(&this->u, d, order));
    }
    T operator+=(DiffT arg) volatile
    {
        A d = static_cast<A>(arg * sizeof(StrideT));
        return T(core_util_atomic_incr(&this->u, d));
    }
    T fetch_sub(DiffT arg) volatile
    {
        A d = static_cast<A>(arg * sizeof(StrideT));
        return T(core_util_atomic_fetch_sub(&this->u, d));
    }
    T fetch_sub(DiffT arg, memory_order order) volatile
    {
        A d = static_cast<A>(arg * sizeof(StrideT));
        return T(core_util_atomic_fetch_sub_explicit(&this->u, d, order));
    }
    T operator-=(DiffT arg) volatile
    {
        A d = static_cast<A>(arg * sizeof(StrideT));
        return T(core_util_atomic_decr(&this->u, d));
    }
};
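
/* Because the arithmetic above is carried out on the unsigned container type,
 * overflow of a signed atomic wraps in two's complement rather than being
 * undefined behaviour - e.g. something like:
 *
 *     mstd::atomic<int8_t> v(127);
 *     v.fetch_add(1);            // v.load() is now -128 on the supported targets
 */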

/* Template for an integer Atomic with bitwise operations
 */
template<typename T, typename A = atomic_container_t<T>>
struct AtomicWithBitwise : public AtomicWithAdd<T, A> {
#ifdef __CC_ARM
    AtomicWithBitwise() noexcept = default;
    constexpr AtomicWithBitwise(T v) noexcept : AtomicWithAdd<T, A>(v)
    {
    }
#else
    using AtomicWithAdd<T, A>::AtomicWithAdd;
#endif
    T fetch_and(T arg) volatile noexcept
    {
        A d = static_cast<A>(arg);
        return static_cast<T>(core_util_atomic_fetch_and(&this->u, d));
    }
    T fetch_and(T arg, memory_order order) volatile noexcept
    {
        A d = static_cast<A>(arg);
        return static_cast<T>(core_util_atomic_fetch_and_explicit(&this->u, d, order));
    }
    T operator&=(T arg) volatile noexcept
    {
        A d = static_cast<A>(arg);
        return static_cast<T>(core_util_atomic_and_fetch(&this->u, d));
    }
    T fetch_or(T arg) volatile noexcept
    {
        A d = static_cast<A>(arg);
        return static_cast<T>(core_util_atomic_fetch_or(&this->u, d));
    }
    T fetch_or(T arg, memory_order order) volatile noexcept
    {
        A d = static_cast<A>(arg);
        return static_cast<T>(core_util_atomic_fetch_or_explicit(&this->u, d, order));
    }
    T operator|=(T arg) volatile noexcept
    {
        A d = static_cast<A>(arg);
        return static_cast<T>(core_util_atomic_or_fetch(&this->u, d));
    }
    T fetch_xor(T arg) volatile noexcept
    {
        A d = static_cast<A>(arg);
        return static_cast<T>(core_util_atomic_fetch_xor(&this->u, d));
    }
    T fetch_xor(T arg, memory_order order) volatile noexcept
    {
        A d = static_cast<A>(arg);
        return static_cast<T>(core_util_atomic_fetch_xor_explicit(&this->u, d, order));
    }
    T operator^=(T arg) volatile noexcept
    {
        A d = static_cast<A>(arg);
        return static_cast<T>(core_util_atomic_xor_fetch(&this->u, d));
    }
};
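
/* Typical use of the bitwise forms - an event-flag word set from interrupt
 * context and drained by a thread (the flag values here are purely illustrative):
 *
 *     mstd::atomic<uint32_t> flags;
 *
 *     void on_rx_irq()    { flags.fetch_or(0x01); }      // set a flag atomically
 *     uint32_t take_all() { return flags.exchange(0); }  // read and clear in one step
 */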

/* Selector between the implementations
 * bool -> AtomicBaseInt
 * other integral types -> AtomicWithBitwise
 * everything else -> AtomicBaseRaw
 * (Pointers are specialized in the public API)
 */
// *INDENT-OFF*
template<typename T, typename = void>
struct AtomicSelector : type_identity<AtomicBaseRaw<T>> { };

template<typename T>
struct AtomicSelector<T, enable_if_t<is_same<bool, T>::value>>
    : type_identity<AtomicBaseInt<T>> { };

template<typename T>
struct AtomicSelector<T, enable_if_t<is_integral<T>::value && !is_same<bool, T>::value>>
    : type_identity<AtomicWithBitwise<T>> { };
// *INDENT-ON*

template<typename T>
using Atomic = typename AtomicSelector<T>::type;
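
/* So, for instance, the selector is expected to resolve as:
 *
 *     Atomic<bool>     -> AtomicBaseInt<bool>
 *     Atomic<unsigned> -> AtomicWithBitwise<unsigned>
 *     Atomic<float>    -> AtomicBaseRaw<float>
 */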

} // namespace impl

template<typename T>
void atomic_init(volatile atomic<T> *obj, typename atomic<T>::value_type desired) noexcept;

template<typename T>
void atomic_init(atomic<T> *obj, typename atomic<T>::value_type desired) noexcept;

/* Base template - let impl::Atomic<T> dispatch to raw, base integer or integer-with-bitwise */
template<typename T>
struct atomic : public impl::Atomic<T> {
    // Constraints from LWG 3012
    static_assert(is_trivially_copyable<T>::value, "Atomic types must be TriviallyCopyable");
    static_assert(is_copy_constructible<T>::value, "Atomic types must be CopyConstructible");
    static_assert(is_move_constructible<T>::value, "Atomic types must be MoveConstructible");
    static_assert(is_copy_assignable<T>::value, "Atomic types must be CopyAssignable");
    static_assert(is_move_assignable<T>::value, "Atomic types must be MoveAssignable");
    atomic() noexcept = default;
    atomic(const atomic &) = delete;
    constexpr atomic(T v) noexcept : impl::Atomic<T>(std::move(v))
    {
    }
    operator T() const volatile noexcept
    {
        return this->load();
    }
    operator T() const noexcept
    {
        return this->load();
    }
    T operator=(T desired) volatile noexcept
    {
        this->store(desired);
        return desired;
    }
    T operator=(T desired) noexcept
    {
        this->store(desired);
        return desired;
    }
    atomic &operator=(const atomic &) = delete;
private:
    friend void atomic_init<>(volatile atomic *obj, typename atomic::value_type desired) noexcept;
    friend void atomic_init<>(atomic *obj, typename atomic::value_type desired) noexcept;
};


/* Pointer specialisation - support increment and decrement by ptrdiff_t,
 * as long as sizeof(T) is valid to act as the stride. Annoyingly, C++11
 * doesn't provide operator->, so neither do we, so you have to say
 * "aptr.load()->member" to use it to access a structure. *aptr is fine though.
 */
template<typename T>
struct atomic<T *> : public impl::AtomicWithAdd<T *, ptrdiff_t, T> {
    atomic() noexcept = default;
    atomic(const atomic &) = delete;
    constexpr atomic(T *v) noexcept : impl::AtomicWithAdd<T *, ptrdiff_t, T>(v)
    {
    }
    operator T *() const volatile noexcept
    {
        return this->load();
    }
    operator T *() const noexcept
    {
        return this->load();
    }
    T *operator=(T *desired) volatile noexcept
    {
        this->store(desired);
        return desired;
    }
    T *operator=(T *desired) noexcept
    {
        this->store(desired);
        return desired;
    }
    atomic &operator=(const atomic &) = delete;
private:
    friend void atomic_init<>(volatile atomic *obj, typename atomic::value_type desired) noexcept;
    friend void atomic_init<>(atomic *obj, typename atomic::value_type desired) noexcept;
};
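
/* Usage sketch for the pointer specialisation (the types and values here are
 * illustrative only):
 *
 *     struct Msg { int id; };
 *     mstd::atomic<Msg *> head;              // static - initialized to nullptr
 *
 *     int current_id()
 *     {
 *         Msg *p = head.load();              // no operator->, so load() first
 *         return p ? p->id : -1;
 *     }
 *
 *     void skip(mstd::atomic<uint32_t *> &cursor)
 *     {
 *         cursor += 4;                       // advances by four uint32_t elements
 *     }
 */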

using atomic_bool = atomic<bool>;
using atomic_char = atomic<char>;
using atomic_schar = atomic<signed char>;
using atomic_uchar = atomic<unsigned char>;
using atomic_char16_t = atomic<char16_t>;
using atomic_char32_t = atomic<char32_t>;
using atomic_wchar_t = atomic<wchar_t>;
using atomic_short = atomic<short>;
using atomic_ushort = atomic<unsigned short>;
using atomic_int = atomic<int>;
using atomic_uint = atomic<unsigned int>;
using atomic_long = atomic<long>;
using atomic_ulong = atomic<unsigned long>;
using atomic_llong = atomic<long long>;
using atomic_ullong = atomic<unsigned long long>;
using atomic_int8_t = atomic<int8_t>;
using atomic_uint8_t = atomic<uint8_t>;
using atomic_int16_t = atomic<int16_t>;
using atomic_uint16_t = atomic<uint16_t>;
using atomic_int32_t = atomic<int32_t>;
using atomic_uint32_t = atomic<uint32_t>;
using atomic_int64_t = atomic<int64_t>;
using atomic_uint64_t = atomic<uint64_t>;
using atomic_int_least8_t = atomic<int_least8_t>;
using atomic_uint_least8_t = atomic<uint_least8_t>;
using atomic_int_least16_t = atomic<int_least16_t>;
using atomic_uint_least16_t = atomic<uint_least16_t>;
using atomic_int_least32_t = atomic<int_least32_t>;
using atomic_uint_least32_t = atomic<uint_least32_t>;
using atomic_int_least64_t = atomic<int_least64_t>;
using atomic_uint_least64_t = atomic<uint_least64_t>;
using atomic_int_fast8_t = atomic<int_fast8_t>;
using atomic_uint_fast8_t = atomic<uint_fast8_t>;
using atomic_int_fast16_t = atomic<int_fast16_t>;
using atomic_uint_fast16_t = atomic<uint_fast16_t>;
using atomic_int_fast32_t = atomic<int_fast32_t>;
using atomic_uint_fast32_t = atomic<uint_fast32_t>;
using atomic_int_fast64_t = atomic<int_fast64_t>;
using atomic_uint_fast64_t = atomic<uint_fast64_t>;
using atomic_intptr_t = atomic<intptr_t>;
using atomic_uintptr_t = atomic<uintptr_t>;
using atomic_size_t = atomic<size_t>;
using atomic_ptrdiff_t = atomic<ptrdiff_t>;
using atomic_intmax_t = atomic<intmax_t>;
using atomic_uintmax_t = atomic<uintmax_t>;
kenjiArai 1:9db0e321a9f4 848
kenjiArai 1:9db0e321a9f4 849 template<typename T>
kenjiArai 1:9db0e321a9f4 850 void atomic_init(atomic<T> *obj, typename atomic<T>::value_type desired) noexcept
kenjiArai 1:9db0e321a9f4 851 {
kenjiArai 1:9db0e321a9f4 852 obj->init(desired);
kenjiArai 1:9db0e321a9f4 853 }
kenjiArai 1:9db0e321a9f4 854
kenjiArai 1:9db0e321a9f4 855 template<typename T>
kenjiArai 1:9db0e321a9f4 856 void atomic_init(volatile atomic<T> *obj, typename atomic<T>::value_type desired) noexcept
kenjiArai 1:9db0e321a9f4 857 {
kenjiArai 1:9db0e321a9f4 858 obj->init(desired);
kenjiArai 1:9db0e321a9f4 859 }
kenjiArai 1:9db0e321a9f4 860
kenjiArai 1:9db0e321a9f4 861 template<typename T>
kenjiArai 1:9db0e321a9f4 862 bool atomic_is_lock_free(const atomic<T> *obj) noexcept
kenjiArai 1:9db0e321a9f4 863 {
kenjiArai 1:9db0e321a9f4 864 return obj->is_lock_free();
kenjiArai 1:9db0e321a9f4 865 }
kenjiArai 1:9db0e321a9f4 866
kenjiArai 1:9db0e321a9f4 867 template<typename T>
kenjiArai 1:9db0e321a9f4 868 bool atomic_is_lock_free(const volatile atomic<T> *obj) noexcept
kenjiArai 1:9db0e321a9f4 869 {
kenjiArai 1:9db0e321a9f4 870 return obj->is_lock_free();
kenjiArai 1:9db0e321a9f4 871 }
kenjiArai 1:9db0e321a9f4 872
kenjiArai 1:9db0e321a9f4 873 template<typename T>
kenjiArai 1:9db0e321a9f4 874 void atomic_store(atomic<T> *obj, typename atomic<T>::value_type desired) noexcept
kenjiArai 1:9db0e321a9f4 875 {
kenjiArai 1:9db0e321a9f4 876 obj->store(desired);
kenjiArai 1:9db0e321a9f4 877 }
kenjiArai 1:9db0e321a9f4 878
kenjiArai 1:9db0e321a9f4 879 template<typename T>
kenjiArai 1:9db0e321a9f4 880 void atomic_store(volatile atomic<T> *obj, typename atomic<T>::value_type desired) noexcept
kenjiArai 1:9db0e321a9f4 881 {
kenjiArai 1:9db0e321a9f4 882 obj->store(desired);
kenjiArai 1:9db0e321a9f4 883 }
kenjiArai 1:9db0e321a9f4 884
kenjiArai 1:9db0e321a9f4 885 template<typename T>
kenjiArai 1:9db0e321a9f4 886 void atomic_store_explicit(atomic<T> *obj, typename atomic<T>::value_type desired, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 887 {
kenjiArai 1:9db0e321a9f4 888 obj->store(desired, order);
kenjiArai 1:9db0e321a9f4 889 }
kenjiArai 1:9db0e321a9f4 890
kenjiArai 1:9db0e321a9f4 891 template<typename T>
kenjiArai 1:9db0e321a9f4 892 void atomic_store_explicit(volatile atomic<T> *obj, typename atomic<T>::value_type desired, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 893 {
kenjiArai 1:9db0e321a9f4 894 obj->store(desired, order);
kenjiArai 1:9db0e321a9f4 895 }
kenjiArai 1:9db0e321a9f4 896
kenjiArai 1:9db0e321a9f4 897 template<typename T>
kenjiArai 1:9db0e321a9f4 898 T atomic_load(const atomic<T> *obj) noexcept
kenjiArai 1:9db0e321a9f4 899 {
kenjiArai 1:9db0e321a9f4 900 return obj->load();
kenjiArai 1:9db0e321a9f4 901 }
kenjiArai 1:9db0e321a9f4 902
kenjiArai 1:9db0e321a9f4 903 template<typename T>
kenjiArai 1:9db0e321a9f4 904 T atomic_load(const volatile atomic<T> *obj) noexcept
kenjiArai 1:9db0e321a9f4 905 {
kenjiArai 1:9db0e321a9f4 906 return obj->load();
kenjiArai 1:9db0e321a9f4 907 }
kenjiArai 1:9db0e321a9f4 908
kenjiArai 1:9db0e321a9f4 909 template<typename T>
kenjiArai 1:9db0e321a9f4 910 T atomic_load_explicit(const atomic<T> *obj, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 911 {
kenjiArai 1:9db0e321a9f4 912 return obj->load(order);
kenjiArai 1:9db0e321a9f4 913 }
kenjiArai 1:9db0e321a9f4 914
kenjiArai 1:9db0e321a9f4 915 template<typename T>
kenjiArai 1:9db0e321a9f4 916 T atomic_load_explicit(const volatile atomic<T> *obj, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 917 {
kenjiArai 1:9db0e321a9f4 918 return obj->load(order);
kenjiArai 1:9db0e321a9f4 919 }
kenjiArai 1:9db0e321a9f4 920
kenjiArai 1:9db0e321a9f4 921 template<typename T>
kenjiArai 1:9db0e321a9f4 922 T atomic_exchange(atomic<T> *obj, typename atomic<T>::value_type desired) noexcept
kenjiArai 1:9db0e321a9f4 923 {
kenjiArai 1:9db0e321a9f4 924 return obj->exchange(desired);
kenjiArai 1:9db0e321a9f4 925 }
kenjiArai 1:9db0e321a9f4 926
kenjiArai 1:9db0e321a9f4 927 template<typename T>
kenjiArai 1:9db0e321a9f4 928 T atomic_exchange(volatile atomic<T> *obj, typename atomic<T>::value_type desired) noexcept
kenjiArai 1:9db0e321a9f4 929 {
kenjiArai 1:9db0e321a9f4 930 return obj->exchange(desired);
kenjiArai 1:9db0e321a9f4 931 }
kenjiArai 1:9db0e321a9f4 932
kenjiArai 1:9db0e321a9f4 933 template<typename T>
kenjiArai 1:9db0e321a9f4 934 T atomic_exchange_explicit(atomic<T> *obj, typename atomic<T>::value_type desired, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 935 {
kenjiArai 1:9db0e321a9f4 936 return obj->exchange(desired, order);
kenjiArai 1:9db0e321a9f4 937 }
kenjiArai 1:9db0e321a9f4 938
kenjiArai 1:9db0e321a9f4 939 template<typename T>
kenjiArai 1:9db0e321a9f4 940 T atomic_exchange_explicit(volatile atomic<T> *obj, typename atomic<T>::value_type desired, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 941 {
kenjiArai 1:9db0e321a9f4 942 return obj->exchange(desired, order);
kenjiArai 1:9db0e321a9f4 943 }
kenjiArai 1:9db0e321a9f4 944
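/*
 * Illustrative usage sketch (not part of the original header): atomic_exchange()
 * reads the old value and writes a new one in a single atomic step, which is a
 * convenient way to consume a one-shot event exactly once. `pending` is a
 * hypothetical name.
 *
 *   static mstd::atomic<bool> pending;
 *
 *   // consumer: clear the flag and learn whether it had been set
 *   if (mstd::atomic_exchange(&pending, false)) {
 *       // the event was pending; handle it here
 *   }
 */
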
kenjiArai 1:9db0e321a9f4 945 template<typename T>
kenjiArai 1:9db0e321a9f4 946 bool atomic_compare_exchange_weak(atomic<T> *obj,
kenjiArai 1:9db0e321a9f4 947 typename atomic<T>::value_type *currentExpected,
kenjiArai 1:9db0e321a9f4 948 typename atomic<T>::value_type desired) noexcept
kenjiArai 1:9db0e321a9f4 949 {
kenjiArai 1:9db0e321a9f4 950 return obj->compare_exchange_weak(*currentExpected, desired);
kenjiArai 1:9db0e321a9f4 951 }
kenjiArai 1:9db0e321a9f4 952
kenjiArai 1:9db0e321a9f4 953 template<typename T>
kenjiArai 1:9db0e321a9f4 954 bool atomic_compare_exchange_weak(volatile atomic<T> *obj,
kenjiArai 1:9db0e321a9f4 955 typename atomic<T>::value_type *currentExpected,
kenjiArai 1:9db0e321a9f4 956 typename atomic<T>::value_type desired) noexcept
kenjiArai 1:9db0e321a9f4 957 {
kenjiArai 1:9db0e321a9f4 958 return obj->compare_exchange_weak(*currentExpected, desired);
kenjiArai 1:9db0e321a9f4 959 }
kenjiArai 1:9db0e321a9f4 960
kenjiArai 1:9db0e321a9f4 961 template<typename T>
kenjiArai 1:9db0e321a9f4 962 bool atomic_compare_exchange_strong(atomic<T> *obj,
kenjiArai 1:9db0e321a9f4 963 typename atomic<T>::value_type *currentExpected,
kenjiArai 1:9db0e321a9f4 964 typename atomic<T>::value_type desired) noexcept
kenjiArai 1:9db0e321a9f4 965 {
kenjiArai 1:9db0e321a9f4 966 return obj->compare_exchange_strong(*currentExpected, desired);
kenjiArai 1:9db0e321a9f4 967 }
kenjiArai 1:9db0e321a9f4 968
kenjiArai 1:9db0e321a9f4 969 template<typename T>
kenjiArai 1:9db0e321a9f4 970 bool atomic_compare_exchange_strong(volatile atomic<T> *obj,
kenjiArai 1:9db0e321a9f4 971 typename atomic<T>::value_type *currentExpected,
kenjiArai 1:9db0e321a9f4 972 typename atomic<T>::value_type desired) noexcept
kenjiArai 1:9db0e321a9f4 973 {
kenjiArai 1:9db0e321a9f4 974 return obj->compare_exchange_strong(*currentExpected, desired);
kenjiArai 1:9db0e321a9f4 975 }
kenjiArai 1:9db0e321a9f4 976
kenjiArai 1:9db0e321a9f4 977 template<typename T>
kenjiArai 1:9db0e321a9f4 978 bool atomic_compare_exchange_weak_explicit(atomic<T> *obj,
kenjiArai 1:9db0e321a9f4 979 typename atomic<T>::value_type *currentExpected,
kenjiArai 1:9db0e321a9f4 980 typename atomic<T>::value_type desired,
kenjiArai 1:9db0e321a9f4 981 memory_order success,
kenjiArai 1:9db0e321a9f4 982 memory_order failure) noexcept
kenjiArai 1:9db0e321a9f4 983 {
kenjiArai 1:9db0e321a9f4 984 return obj->compare_exchange_weak(*currentExpected, desired, success, failure);
kenjiArai 1:9db0e321a9f4 985 }
kenjiArai 1:9db0e321a9f4 986
kenjiArai 1:9db0e321a9f4 987 template<typename T>
kenjiArai 1:9db0e321a9f4 988 bool atomic_compare_exchange_weak_explicit(volatile atomic<T> *obj,
kenjiArai 1:9db0e321a9f4 989 typename atomic<T>::value_type *currentExpected,
kenjiArai 1:9db0e321a9f4 990 typename atomic<T>::value_type desired,
kenjiArai 1:9db0e321a9f4 991 memory_order success,
kenjiArai 1:9db0e321a9f4 992 memory_order failure) noexcept
kenjiArai 1:9db0e321a9f4 993 {
kenjiArai 1:9db0e321a9f4 994 return obj->compare_exchange_weak(*currentExpected, desired, success, failure);
kenjiArai 1:9db0e321a9f4 995 }
kenjiArai 1:9db0e321a9f4 996
kenjiArai 1:9db0e321a9f4 997 template<typename T>
kenjiArai 1:9db0e321a9f4 998 bool atomic_compare_exchange_strong_explicit(atomic<T> *obj,
kenjiArai 1:9db0e321a9f4 999 typename atomic<T>::value_type *currentExpected,
kenjiArai 1:9db0e321a9f4 1000 typename atomic<T>::value_type desired,
kenjiArai 1:9db0e321a9f4 1001 memory_order success,
kenjiArai 1:9db0e321a9f4 1002 memory_order failure) noexcept
kenjiArai 1:9db0e321a9f4 1003 {
kenjiArai 1:9db0e321a9f4 1004 return obj->compare_exchange_strong(*currentExpected, desired, success, failure);
kenjiArai 1:9db0e321a9f4 1005 }
kenjiArai 1:9db0e321a9f4 1006
kenjiArai 1:9db0e321a9f4 1007 template<typename T>
kenjiArai 1:9db0e321a9f4 1008 bool atomic_compare_exchange_strong_explicit(volatile atomic<T> *obj,
kenjiArai 1:9db0e321a9f4 1009 typename atomic<T>::value_type *currentExpected,
kenjiArai 1:9db0e321a9f4 1010 typename atomic<T>::value_type desired,
kenjiArai 1:9db0e321a9f4 1011 memory_order success,
kenjiArai 1:9db0e321a9f4 1012 memory_order failure) noexcept
kenjiArai 1:9db0e321a9f4 1013 {
kenjiArai 1:9db0e321a9f4 1014 return obj->compare_exchange_strong(*currentExpected, desired, success, failure);
kenjiArai 1:9db0e321a9f4 1015 }
kenjiArai 1:9db0e321a9f4 1016
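/*
 * Illustrative usage sketch (not part of the original header): the canonical
 * compare-exchange retry loop. On failure the expected value is reloaded with
 * the current contents, so each iteration recomputes the desired value before
 * retrying. `counter`, `expected` and `desired` are hypothetical names.
 *
 *   static mstd::atomic<int> counter;
 *
 *   int expected = mstd::atomic_load(&counter);
 *   int desired;
 *   do {
 *       desired = (expected < 100) ? expected + 1 : expected;  // saturate at 100
 *   } while (!mstd::atomic_compare_exchange_weak(&counter, &expected, desired));
 */
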
kenjiArai 1:9db0e321a9f4 1017 template<typename T>
kenjiArai 1:9db0e321a9f4 1018 T atomic_fetch_add(atomic<T> *obj, typename atomic<T>::difference_type arg) noexcept
kenjiArai 1:9db0e321a9f4 1019 {
kenjiArai 1:9db0e321a9f4 1020 return obj->fetch_add(arg);
kenjiArai 1:9db0e321a9f4 1021 }
kenjiArai 1:9db0e321a9f4 1022
kenjiArai 1:9db0e321a9f4 1023 template<typename T>
kenjiArai 1:9db0e321a9f4 1024 T atomic_fetch_add(volatile atomic<T> *obj, typename atomic<T>::difference_type arg) noexcept
kenjiArai 1:9db0e321a9f4 1025 {
kenjiArai 1:9db0e321a9f4 1026 return obj->fetch_add(arg);
kenjiArai 1:9db0e321a9f4 1027 }
kenjiArai 1:9db0e321a9f4 1028
kenjiArai 1:9db0e321a9f4 1029 template<typename T>
kenjiArai 1:9db0e321a9f4 1030 T atomic_fetch_add_explicit(atomic<T> *obj, typename atomic<T>::difference_type arg, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1031 {
kenjiArai 1:9db0e321a9f4 1032 return obj->fetch_add(arg, order);
kenjiArai 1:9db0e321a9f4 1033 }
kenjiArai 1:9db0e321a9f4 1034
kenjiArai 1:9db0e321a9f4 1035 template<typename T>
kenjiArai 1:9db0e321a9f4 1036 T atomic_fetch_add_explicit(volatile atomic<T> *obj, typename atomic<T>::difference_type arg, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1037 {
kenjiArai 1:9db0e321a9f4 1038 return obj->fetch_add(arg, order);
kenjiArai 1:9db0e321a9f4 1039 }
kenjiArai 1:9db0e321a9f4 1040
kenjiArai 1:9db0e321a9f4 1041 template<typename T>
kenjiArai 1:9db0e321a9f4 1042 T atomic_fetch_sub(atomic<T> *obj, typename atomic<T>::difference_type arg) noexcept
kenjiArai 1:9db0e321a9f4 1043 {
kenjiArai 1:9db0e321a9f4 1044 return obj->fetch_sub(arg);
kenjiArai 1:9db0e321a9f4 1045 }
kenjiArai 1:9db0e321a9f4 1046
kenjiArai 1:9db0e321a9f4 1047 template<typename T>
kenjiArai 1:9db0e321a9f4 1048 T atomic_fetch_sub(volatile atomic<T> *obj, typename atomic<T>::difference_type arg) noexcept
kenjiArai 1:9db0e321a9f4 1049 {
kenjiArai 1:9db0e321a9f4 1050 return obj->fetch_sub(arg);
kenjiArai 1:9db0e321a9f4 1051 }
kenjiArai 1:9db0e321a9f4 1052
kenjiArai 1:9db0e321a9f4 1053 template<typename T>
kenjiArai 1:9db0e321a9f4 1054 T atomic_fetch_sub_explicit(atomic<T> *obj, typename atomic<T>::difference_type arg, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1055 {
kenjiArai 1:9db0e321a9f4 1056 return obj->fetch_sub(arg, order);
kenjiArai 1:9db0e321a9f4 1057 }
kenjiArai 1:9db0e321a9f4 1058
kenjiArai 1:9db0e321a9f4 1059 template<typename T>
kenjiArai 1:9db0e321a9f4 1060 T atomic_fetch_sub_explicit(volatile atomic<T> *obj, typename atomic<T>::difference_type arg, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1061 {
kenjiArai 1:9db0e321a9f4 1062 return obj->fetch_sub(arg, order);
kenjiArai 1:9db0e321a9f4 1063 }
kenjiArai 1:9db0e321a9f4 1064
kenjiArai 1:9db0e321a9f4 1065 template<typename T>
kenjiArai 1:9db0e321a9f4 1066 T atomic_fetch_and(atomic<T> *obj, typename atomic<T>::value_type arg) noexcept
kenjiArai 1:9db0e321a9f4 1067 {
kenjiArai 1:9db0e321a9f4 1068 return obj->fetch_and(arg);
kenjiArai 1:9db0e321a9f4 1069 }
kenjiArai 1:9db0e321a9f4 1070
kenjiArai 1:9db0e321a9f4 1071 template<typename T>
kenjiArai 1:9db0e321a9f4 1072 T atomic_fetch_and(volatile atomic<T> *obj, typename atomic<T>::value_type arg) noexcept
kenjiArai 1:9db0e321a9f4 1073 {
kenjiArai 1:9db0e321a9f4 1074 return obj->fetch_and(arg);
kenjiArai 1:9db0e321a9f4 1075 }
kenjiArai 1:9db0e321a9f4 1076
kenjiArai 1:9db0e321a9f4 1077 template<typename T>
kenjiArai 1:9db0e321a9f4 1078 T atomic_fetch_and_explicit(atomic<T> *obj, typename atomic<T>::value_type arg, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1079 {
kenjiArai 1:9db0e321a9f4 1080 return obj->fetch_and(arg, order);
kenjiArai 1:9db0e321a9f4 1081 }
kenjiArai 1:9db0e321a9f4 1082
kenjiArai 1:9db0e321a9f4 1083 template<typename T>
kenjiArai 1:9db0e321a9f4 1084 T atomic_fetch_and_explicit(volatile atomic<T> *obj, typename atomic<T>::value_type arg, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1085 {
kenjiArai 1:9db0e321a9f4 1086 return obj->fetch_and(arg, order);
kenjiArai 1:9db0e321a9f4 1087 }
kenjiArai 1:9db0e321a9f4 1088
kenjiArai 1:9db0e321a9f4 1089 template<typename T>
kenjiArai 1:9db0e321a9f4 1090 T atomic_fetch_or(atomic<T> *obj, typename atomic<T>::value_type arg) noexcept
kenjiArai 1:9db0e321a9f4 1091 {
kenjiArai 1:9db0e321a9f4 1092 return obj->fetch_or(arg);
kenjiArai 1:9db0e321a9f4 1093 }
kenjiArai 1:9db0e321a9f4 1094
kenjiArai 1:9db0e321a9f4 1095 template<typename T>
kenjiArai 1:9db0e321a9f4 1096 T atomic_fetch_or(volatile atomic<T> *obj, typename atomic<T>::value_type arg) noexcept
kenjiArai 1:9db0e321a9f4 1097 {
kenjiArai 1:9db0e321a9f4 1098 return obj->fetch_or(arg);
kenjiArai 1:9db0e321a9f4 1099 }
kenjiArai 1:9db0e321a9f4 1100
kenjiArai 1:9db0e321a9f4 1101 template<typename T>
kenjiArai 1:9db0e321a9f4 1102 T atomic_fetch_or_explicit(atomic<T> *obj, typename atomic<T>::value_type arg, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1103 {
kenjiArai 1:9db0e321a9f4 1104 return obj->fetch_or(arg, order);
kenjiArai 1:9db0e321a9f4 1105 }
kenjiArai 1:9db0e321a9f4 1106
kenjiArai 1:9db0e321a9f4 1107 template<typename T>
kenjiArai 1:9db0e321a9f4 1108 T atomic_fetch_or_explicit(volatile atomic<T> *obj, typename atomic<T>::value_type arg, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1109 {
kenjiArai 1:9db0e321a9f4 1110 return obj->fetch_or(arg, order);
kenjiArai 1:9db0e321a9f4 1111 }
kenjiArai 1:9db0e321a9f4 1112
kenjiArai 1:9db0e321a9f4 1113 template<typename T>
kenjiArai 1:9db0e321a9f4 1114 T atomic_fetch_xor(atomic<T> *obj, typename atomic<T>::value_type arg) noexcept
kenjiArai 1:9db0e321a9f4 1115 {
kenjiArai 1:9db0e321a9f4 1116 return obj->fetch_xor(arg);
kenjiArai 1:9db0e321a9f4 1117 }
kenjiArai 1:9db0e321a9f4 1118
kenjiArai 1:9db0e321a9f4 1119 template<typename T>
kenjiArai 1:9db0e321a9f4 1120 T atomic_fetch_xor(volatile atomic<T> *obj, typename atomic<T>::value_type arg) noexcept
kenjiArai 1:9db0e321a9f4 1121 {
kenjiArai 1:9db0e321a9f4 1122 return obj->fetch_xor(arg);
kenjiArai 1:9db0e321a9f4 1123 }
kenjiArai 1:9db0e321a9f4 1124
kenjiArai 1:9db0e321a9f4 1125 template<typename T>
kenjiArai 1:9db0e321a9f4 1126 T atomic_fetch_xor_explicit(atomic<T> *obj, typename atomic<T>::value_type arg, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1127 {
kenjiArai 1:9db0e321a9f4 1128 return obj->fetch_xor(arg, order);
kenjiArai 1:9db0e321a9f4 1129 }
kenjiArai 1:9db0e321a9f4 1130
kenjiArai 1:9db0e321a9f4 1131 template<typename T>
kenjiArai 1:9db0e321a9f4 1132 T atomic_fetch_xor_explicit(volatile atomic<T> *obj, typename atomic<T>::value_type arg, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1133 {
kenjiArai 1:9db0e321a9f4 1134 return obj->fetch_xor(arg, order);
kenjiArai 1:9db0e321a9f4 1135 }
kenjiArai 1:9db0e321a9f4 1136
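/*
 * Illustrative usage sketch (not part of the original header): the fetch_*
 * functions return the value held *before* the modification. `events` and
 * `error_bits` are hypothetical names.
 *
 *   static mstd::atomic<unsigned> events;
 *   static mstd::atomic<uint8_t> error_bits;
 *
 *   unsigned previous = mstd::atomic_fetch_add(&events, 1u);                 // count an event
 *   uint8_t old_bits = mstd::atomic_fetch_or(&error_bits, uint8_t(0x04));    // set bit 2
 *   (void)previous; (void)old_bits;
 */
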
kenjiArai 1:9db0e321a9f4 1137 struct atomic_flag {
kenjiArai 1:9db0e321a9f4 1138 atomic_flag() noexcept = default;
kenjiArai 1:9db0e321a9f4 1139 atomic_flag(const atomic_flag &) = delete;
kenjiArai 1:9db0e321a9f4 1140 atomic_flag &operator=(const atomic_flag &) = delete;
kenjiArai 1:9db0e321a9f4 1141 atomic_flag &operator=(const atomic_flag &) volatile = delete;
kenjiArai 1:9db0e321a9f4 1142 bool test_and_set() volatile noexcept
kenjiArai 1:9db0e321a9f4 1143 {
kenjiArai 1:9db0e321a9f4 1144 return core_util_atomic_flag_test_and_set(&_flag);
kenjiArai 1:9db0e321a9f4 1145 }
kenjiArai 1:9db0e321a9f4 1146 bool test_and_set(memory_order order) volatile noexcept
kenjiArai 1:9db0e321a9f4 1147 {
kenjiArai 1:9db0e321a9f4 1148 return core_util_atomic_flag_test_and_set_explicit(&_flag, order);
kenjiArai 1:9db0e321a9f4 1149 }
kenjiArai 1:9db0e321a9f4 1150 void clear() volatile noexcept
kenjiArai 1:9db0e321a9f4 1151 {
kenjiArai 1:9db0e321a9f4 1152 core_util_atomic_flag_clear(&_flag);
kenjiArai 1:9db0e321a9f4 1153 }
kenjiArai 1:9db0e321a9f4 1154 void clear() noexcept
kenjiArai 1:9db0e321a9f4 1155 {
kenjiArai 1:9db0e321a9f4 1156 core_util_atomic_flag_clear(&_flag);
kenjiArai 1:9db0e321a9f4 1157 }
kenjiArai 1:9db0e321a9f4 1158 void clear(memory_order order) volatile noexcept
kenjiArai 1:9db0e321a9f4 1159 {
kenjiArai 1:9db0e321a9f4 1160 core_util_atomic_flag_clear_explicit(&_flag, order);
kenjiArai 1:9db0e321a9f4 1161 }
kenjiArai 1:9db0e321a9f4 1162 void clear(memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1163 {
kenjiArai 1:9db0e321a9f4 1164 core_util_atomic_flag_clear_explicit(&_flag, order);
kenjiArai 1:9db0e321a9f4 1165 }
kenjiArai 1:9db0e321a9f4 1166 private:
kenjiArai 1:9db0e321a9f4 1167 core_util_atomic_flag _flag;
kenjiArai 1:9db0e321a9f4 1168 };
kenjiArai 1:9db0e321a9f4 1169
kenjiArai 1:9db0e321a9f4 1170 MBED_FORCEINLINE bool atomic_flag_test_and_set(volatile atomic_flag *flag) noexcept
kenjiArai 1:9db0e321a9f4 1171 {
kenjiArai 1:9db0e321a9f4 1172 return flag->test_and_set();
kenjiArai 1:9db0e321a9f4 1173 }
kenjiArai 1:9db0e321a9f4 1174
kenjiArai 1:9db0e321a9f4 1175 MBED_FORCEINLINE bool atomic_flag_test_and_set(atomic_flag *flag) noexcept
kenjiArai 1:9db0e321a9f4 1176 {
kenjiArai 1:9db0e321a9f4 1177 return flag->test_and_set();
kenjiArai 1:9db0e321a9f4 1178 }
kenjiArai 1:9db0e321a9f4 1179
kenjiArai 1:9db0e321a9f4 1180 MBED_FORCEINLINE bool atomic_flag_test_and_set_explicit(volatile atomic_flag *flag, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1181 {
kenjiArai 1:9db0e321a9f4 1182 return flag->test_and_set(order);
kenjiArai 1:9db0e321a9f4 1183 }
kenjiArai 1:9db0e321a9f4 1184
kenjiArai 1:9db0e321a9f4 1185 MBED_FORCEINLINE bool atomic_flag_test_and_set_explicit(atomic_flag *flag, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1186 {
kenjiArai 1:9db0e321a9f4 1187 return flag->test_and_set(order);
kenjiArai 1:9db0e321a9f4 1188 }
kenjiArai 1:9db0e321a9f4 1189
kenjiArai 1:9db0e321a9f4 1190 MBED_FORCEINLINE void atomic_flag_clear(volatile atomic_flag *flag) noexcept
kenjiArai 1:9db0e321a9f4 1191 {
kenjiArai 1:9db0e321a9f4 1192 flag->clear();
kenjiArai 1:9db0e321a9f4 1193 }
kenjiArai 1:9db0e321a9f4 1194
kenjiArai 1:9db0e321a9f4 1195 MBED_FORCEINLINE void atomic_flag_clear(atomic_flag *flag) noexcept
kenjiArai 1:9db0e321a9f4 1196 {
kenjiArai 1:9db0e321a9f4 1197 flag->clear();
kenjiArai 1:9db0e321a9f4 1198 }
kenjiArai 1:9db0e321a9f4 1199
kenjiArai 1:9db0e321a9f4 1200 MBED_FORCEINLINE void atomic_flag_clear_explicit(volatile atomic_flag *flag, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1201 {
kenjiArai 1:9db0e321a9f4 1202 flag->clear(order);
kenjiArai 1:9db0e321a9f4 1203 }
kenjiArai 1:9db0e321a9f4 1204
kenjiArai 1:9db0e321a9f4 1205 MBED_FORCEINLINE void atomic_flag_clear_explicit(atomic_flag *flag, memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1206 {
kenjiArai 1:9db0e321a9f4 1207 flag->clear(order);
kenjiArai 1:9db0e321a9f4 1208 }
kenjiArai 1:9db0e321a9f4 1209
kenjiArai 1:9db0e321a9f4 1210 #define MSTD_ATOMIC_FLAG_INIT { CORE_UTIL_ATOMIC_FLAG_INIT }
kenjiArai 1:9db0e321a9f4 1211
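/*
 * Illustrative usage sketch (not part of the original header): atomic_flag as
 * a minimal test-and-set lock. A static flag starts in the clear state, per the
 * zero-initialization rules described at the top of this header; busy-waiting
 * is rarely appropriate on a single-core MCU, so this only shows the calling
 * pattern. `guard` is a hypothetical name.
 *
 *   static mstd::atomic_flag guard;
 *
 *   while (mstd::atomic_flag_test_and_set_explicit(&guard, mstd::memory_order_acquire)) {
 *       // busy-wait until the previous holder clears the flag
 *   }
 *   // ... critical region ...
 *   mstd::atomic_flag_clear_explicit(&guard, mstd::memory_order_release);
 */
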
kenjiArai 1:9db0e321a9f4 1212 template<typename T>
kenjiArai 1:9db0e321a9f4 1213 T kill_dependency(T y) noexcept
kenjiArai 1:9db0e321a9f4 1214 {
kenjiArai 1:9db0e321a9f4 1215 return y;
kenjiArai 1:9db0e321a9f4 1216 }
kenjiArai 1:9db0e321a9f4 1217
kenjiArai 1:9db0e321a9f4 1218 MBED_FORCEINLINE void atomic_signal_fence(memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1219 {
kenjiArai 1:9db0e321a9f4 1220 if (order != memory_order_relaxed) {
kenjiArai 1:9db0e321a9f4 1221 MBED_COMPILER_BARRIER();
kenjiArai 1:9db0e321a9f4 1222 }
kenjiArai 1:9db0e321a9f4 1223 }
kenjiArai 1:9db0e321a9f4 1224
kenjiArai 1:9db0e321a9f4 1225 MBED_FORCEINLINE void atomic_thread_fence(memory_order order) noexcept
kenjiArai 1:9db0e321a9f4 1226 {
kenjiArai 1:9db0e321a9f4 1227 if (order != memory_order_relaxed) {
kenjiArai 1:9db0e321a9f4 1228 MBED_BARRIER();
kenjiArai 1:9db0e321a9f4 1229 }
kenjiArai 1:9db0e321a9f4 1230 }
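
/*
 * Illustrative usage sketch (not part of the original header): pairing relaxed
 * accesses with an explicit fence. As implemented above, atomic_signal_fence()
 * only constrains the compiler (ordering with respect to an interrupt handler
 * on the same core), while atomic_thread_fence() also emits a hardware barrier.
 * `data` and `flag` are hypothetical names; the reading side would use a
 * matching acquire fence after loading `flag`.
 *
 *   static int data;
 *   static mstd::atomic<bool> flag;
 *
 *   data = 1;
 *   mstd::atomic_signal_fence(mstd::memory_order_release);   // order vs. an ISR
 *   mstd::atomic_store_explicit(&flag, true, mstd::memory_order_relaxed);
 */
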
kenjiArai 1:9db0e321a9f4 1231 /**@}*/
kenjiArai 1:9db0e321a9f4 1232
kenjiArai 1:9db0e321a9f4 1233 /**@}*/
kenjiArai 1:9db0e321a9f4 1234
kenjiArai 1:9db0e321a9f4 1235 } // namespace mstd
kenjiArai 1:9db0e321a9f4 1236
kenjiArai 1:9db0e321a9f4 1237 #endif