Kenji Arai / mbed-os_TYBLE16

mbed_atomic.h

00001 
00002 /*
00003  * Copyright (c) 2015-2019, ARM Limited, All Rights Reserved
00004  * SPDX-License-Identifier: Apache-2.0
00005  *
00006  * Licensed under the Apache License, Version 2.0 (the "License"); you may
00007  * not use this file except in compliance with the License.
00008  * You may obtain a copy of the License at
00009  *
00010  * http://www.apache.org/licenses/LICENSE-2.0
00011  *
00012  * Unless required by applicable law or agreed to in writing, software
00013  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
00014  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00015  * See the License for the specific language governing permissions and
00016  * limitations under the License.
00017  */
00018 
00019 #ifndef __MBED_UTIL_ATOMIC_H__
00020 #define __MBED_UTIL_ATOMIC_H__
00021 
00022 #include "cmsis.h"
00023 
00024 #include <stdbool.h>
00025 #include <stdint.h>
00026 #include <stddef.h>
00027 #include "platform/mbed_toolchain.h"
00028 
00029 /** \addtogroup platform-public-api */
00030 /** @{*/
00031 
00032 /**
00033  * \defgroup platform_atomic atomic functions
00034  *
00035  * Atomic functions behave analogously to C11 and C++11 - loads have
00036  * acquire semantics, stores have release semantics, and atomic operations
00037  * are sequentially consistent. Atomicity is enforced both between threads and
00038  * interrupt handlers.
00039  *
00040  * @{
00041  */
00042 
00043 #ifdef __cplusplus
00044 extern "C" {
00045 #endif
00046 
00047 /**
00048  * Memory order constraints for atomic operations. Intended semantics
00049  * are as per C++11.
00050  */
00051 typedef enum mbed_memory_order {
00052     /* Bits 0 = consume
00053      *      1 = acquire (explicitly requested, or implied by seq.cst)
00054      *      2 = release (explicitly requested, or implied by seq.cst)
00055      *      4 = sequentially consistent
00056      */
00057     mbed_memory_order_relaxed = 0x00,
00058     mbed_memory_order_consume = 0x01,
00059     mbed_memory_order_acquire = 0x02,
00060     mbed_memory_order_release = 0x04,
00061     mbed_memory_order_acq_rel = 0x06,
00062     mbed_memory_order_seq_cst = 0x16
00063 } mbed_memory_order;
00064 
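/* Example: operations without an explicit order are sequentially consistent;
 * the "_explicit" variants accept one of the orders above. A minimal sketch
 * (assuming a hypothetical diagnostics counter, and using
 * core_util_atomic_fetch_add_explicit_u32 declared later in this file) where
 * relaxed ordering suffices because only atomicity is needed:
 * ~~~
 *     static uint32_t dropped_packets;
 *
 *     void count_dropped_packet(void)
 *     {
 *         core_util_atomic_fetch_add_explicit_u32(&dropped_packets, 1, mbed_memory_order_relaxed);
 *     }
 * ~~~
 */
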
00065 // If the RTX __EXCLUSIVE_ACCESS macro is not defined, derive it from the architecture macros we set ourselves
00066 #ifndef MBED_EXCLUSIVE_ACCESS
00067 #ifndef __EXCLUSIVE_ACCESS
00068 #if defined __arm__ || defined __ICC_ARM__ || defined __ARM_ARCH
00069 #if ((__ARM_ARCH_7M__      == 1U) || \
00070     (__ARM_ARCH_7EM__     == 1U) || \
00071     (__ARM_ARCH_8M_BASE__ == 1U) || \
00072     (__ARM_ARCH_8M_MAIN__ == 1U)) || \
00073     (__ARM_ARCH_7A__ == 1U)
00074 #define MBED_EXCLUSIVE_ACCESS      1U
00075 #define MBED_EXCLUSIVE_ACCESS_THUMB1 (__ARM_ARCH_8M_BASE__ == 1U)
00076 #ifdef __ICCARM__
00077 #if __CPU_MODE__ == 2
00078 #define MBED_EXCLUSIVE_ACCESS_ARM  1U
00079 #else
00080 #define MBED_EXCLUSIVE_ACCESS_ARM  0U
00081 #endif
00082 #else
00083 #if !defined (__thumb__)
00084 #define MBED_EXCLUSIVE_ACCESS_ARM  1U
00085 #else
00086 #define MBED_EXCLUSIVE_ACCESS_ARM  0U
00087 #endif
00088 #endif
00089 #elif (__ARM_ARCH_6M__ == 1U)
00090 #define MBED_EXCLUSIVE_ACCESS      0U
00091 #else
00092 #error "Unknown ARM architecture for exclusive access"
00093 #endif // __ARM_ARCH_xxx
00094 #else // __arm__ || defined __ICC_ARM__ || defined __ARM_ARCH
00095 // Seem to be compiling for non-ARM, so stick with critical section implementations
00096 #define MBED_EXCLUSIVE_ACCESS      0U
00097 #endif
00098 #else
00099 #define MBED_EXCLUSIVE_ACCESS __EXCLUSIVE_ACCESS
00100 #endif
00101 #endif
00102 
00103 #if MBED_EXCLUSIVE_ACCESS
00104 #define MBED_INLINE_IF_EX inline
00105 #else
00106 #define MBED_INLINE_IF_EX
00107 #endif
00108 
00109 /**
00110  * A lock-free, primitive atomic flag.
00111  *
00112  * Emulate C11's atomic_flag. The flag is initially in an indeterminate state
00113  * unless explicitly initialized with CORE_UTIL_ATOMIC_FLAG_INIT.
00114  */
00115 typedef struct core_util_atomic_flag {
00116     uint8_t _flag;
00117 } core_util_atomic_flag;
00118 
00119 /**
00120  * Initializer for a core_util_atomic_flag.
00121  *
00122  * Example:
00123  * ~~~
00124  *     core_util_atomic_flag in_progress = CORE_UTIL_ATOMIC_FLAG_INIT;
00125  * ~~~
00126  */
00127 #define CORE_UTIL_ATOMIC_FLAG_INIT { 0 }
00128 
00129 /**
00130  * Atomic test and set.
00131  *
00132  * Atomically tests then sets the flag to true, returning the previous value.
00133  *
00134  * @param  flagPtr Target flag being tested and set.
00135  * @return         The previous value.
00136  */
00137 MBED_INLINE_IF_EX bool core_util_atomic_flag_test_and_set(volatile core_util_atomic_flag *flagPtr);
00138 
00139 /** \copydoc core_util_atomic_flag_test_and_set
00140  * @param order memory ordering constraint
00141  */
00142 MBED_FORCEINLINE bool core_util_atomic_flag_test_and_set_explicit(volatile core_util_atomic_flag *valuePtr, mbed_memory_order order);
00143 
00144 /**
00145  * Atomic clear.
00146  *
00147  * @param  flagPtr Target flag being cleared.
00148  */
00149 MBED_FORCEINLINE void core_util_atomic_flag_clear(volatile core_util_atomic_flag *flagPtr);
00150 
00151 /** \copydoc core_util_atomic_flag_clear
00152  * @param order memory ordering constraint
00153  */
00154 MBED_FORCEINLINE void core_util_atomic_flag_clear_explicit(volatile core_util_atomic_flag *flagPtr, mbed_memory_order order);
00155 
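/* Example: a minimal spin-lock sketch built from the flag operations above,
 * guarding a hypothetical shared resource. test_and_set acquires the lock,
 * clear releases it:
 * ~~~
 *     static core_util_atomic_flag busy = CORE_UTIL_ATOMIC_FLAG_INIT;
 *
 *     void with_resource(void)
 *     {
 *         while (core_util_atomic_flag_test_and_set(&busy)) {
 *             // flag was already set - spin until the current holder clears it
 *         }
 *         // ... exclusive access to the resource here ...
 *         core_util_atomic_flag_clear(&busy);
 *     }
 * ~~~
 */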
00156 
00157 /**
00158  * Atomic compare and set. It compares the contents of a memory location to a
00159  * given value and, only if they are the same, modifies the contents of that
00160  * memory location to a given new value. This is done as a single atomic
00161  * operation. The atomicity guarantees that the new value is calculated based on
00162  * up-to-date information; if the value had been updated by another thread in
00163  * the meantime, the write would fail due to a mismatched expectedCurrentValue.
00164  *
00165  * Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect
00166  * you to the article on compare-and-swap].
00167  *
00168  * @param  ptr                  The target memory location.
00169  * @param[in,out] expectedCurrentValue A pointer to some location holding the
00170  *                              expected current value of the data being set atomically.
00171  *                              The computed 'desiredValue' should be a function of this current value.
00172  *                              @note: This is an in-out parameter. In the
00173  *                              failure case of atomic_cas (where the
00174  *                              destination isn't set), the pointee of expectedCurrentValue is
00175  *                              updated with the current value.
00176  * @param[in] desiredValue      The new value computed based on '*expectedCurrentValue'.
00177  *
00178  * @return                      true if the memory location was atomically
00179  *                              updated with the desired value (after verifying
00180  *                              that it contained the expectedCurrentValue),
00181  *                              false otherwise. In the failure case,
00182  *                              expectedCurrentValue is updated with the new
00183  *                              value of the target memory location.
00184  *
00185  * pseudocode:
00186  * function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
00187  *     if *p != *old {
00188  *         *old = *p
00189  *         return false
00190  *     }
00191  *     *p = new
00192  *     return true
00193  * }
00194  *
00195  * @note: In the failure case (where the destination isn't set), the value
00196  * pointed to by expectedCurrentValue is instead updated with the current value.
00197  * This property helps with writing concise code, as in the following incr:
00198  *
00199  * function incr(p : pointer to int, a : int) returns int {
00200  *     done = false
00201  *     value = atomic_load(p)
00202  *     while not done {
00203  *         done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success
00204  *     }
00205  *     return value + a
00206  * }
00207  *
00208  * However, if the call is made in a loop like this, the atomic_compare_exchange_weak
00209  * functions are to be preferred.
00210  *
00211  * @note: This corresponds to the C11 "atomic_compare_exchange_strong" - it
00212  * always succeeds if the current value is expected, as per the pseudocode
00213  * above; it will not spuriously fail as "atomic_compare_exchange_weak" may.
00214  * This call would normally be used when a fail return does not retry.
00215  */
00216 MBED_INLINE_IF_EX bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue);
00217 
00218 /** \copydoc core_util_atomic_cas_u8
00219  * @param success memory ordering constraint for successful exchange
00220  * @param failure memory ordering constraint for failure
00221  */
00222 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u8 (volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
00223 
00224 /** \copydoc core_util_atomic_cas_u8 */
00225 MBED_INLINE_IF_EX bool core_util_atomic_cas_u16 (volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue);
00226 
00227 /** \copydoc core_util_atomic_cas_explicit_u8 */
00228 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u16 (volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
00229 
00230 /** \copydoc core_util_atomic_cas_u8 */
00231 MBED_INLINE_IF_EX bool core_util_atomic_cas_u32 (volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue);
00232 
00233 /** \copydoc core_util_atomic_cas_explicit_u8 */
00234 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u32 (volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
00235 
00236 /** \copydoc core_util_atomic_cas_u8 */
00237 bool core_util_atomic_cas_u64 (volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue);
00238 
00239 /** \copydoc core_util_atomic_cas_explicit_u8 */
00240 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u64 (volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
00241 
00242 /** \copydoc core_util_atomic_cas_u8 */
00243 MBED_FORCEINLINE bool core_util_atomic_cas_s8 (volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue);
00244 
00245 /** \copydoc core_util_atomic_cas_explicit_u8 */
00246 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s8 (volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
00247 
00248 /** \copydoc core_util_atomic_cas_u8 */
00249 MBED_FORCEINLINE bool core_util_atomic_cas_s16 (volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue);
00250 
00251 /** \copydoc core_util_atomic_cas_explicit_u8 */
00252 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s16 (volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
00253 
00254 /** \copydoc core_util_atomic_cas_u8 */
00255 MBED_FORCEINLINE bool core_util_atomic_cas_s32 (volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue);
00256 
00257 /** \copydoc core_util_atomic_cas_explicit_u8 */
00258 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s32 (volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
00259 
00260 /** \copydoc core_util_atomic_cas_u8 */
00261 MBED_FORCEINLINE bool core_util_atomic_cas_s64 (volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue);
00262 
00263 /** \copydoc core_util_atomic_cas_explicit_u8 */
00264 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s64 (volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
00265 
00266 /** \copydoc core_util_atomic_cas_u8 */
00267 MBED_FORCEINLINE bool core_util_atomic_cas_bool (volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue);
00268 
00269 /** \copydoc core_util_atomic_cas_explicit_u8 */
00270 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_bool (volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue, mbed_memory_order success, mbed_memory_order failure);
00271 
00272 /** \copydoc core_util_atomic_cas_u8 */
00273 inline bool core_util_atomic_cas_ptr (void *volatile *ptr, void **expectedCurrentValue, void *desiredValue);
00274 
00275 /** \copydoc core_util_atomic_cas_explicit_u8 */
00276 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_ptr (void *volatile *ptr, void **expectedCurrentValue, void *desiredValue, mbed_memory_order success, mbed_memory_order failure);
00277 
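/* Example: a minimal sketch of the CAS loop described above, for a case a
 * simple fetch_add cannot handle - a hypothetical slot counter that must
 * never exceed a limit:
 * ~~~
 *     bool try_acquire_slot(volatile uint32_t *active_count, uint32_t limit)
 *     {
 *         uint32_t current = core_util_atomic_load_u32(active_count);
 *         do {
 *             if (current >= limit) {
 *                 return false;   // all slots taken
 *             }
 *             // on failure, `current` is refreshed with the latest value
 *         } while (!core_util_atomic_cas_u32(active_count, &current, current + 1));
 *         return true;
 *     }
 * ~~~
 * As noted above, when the call sits in a retry loop like this, the
 * compare_exchange_weak family below is normally preferable.
 */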
00278 
00279 
00280 /**
00281  * Atomic compare and set. It compares the contents of a memory location to a
00282  * given value and, only if they are the same, modifies the contents of that
00283  * memory location to a given new value. This is done as a single atomic
00284  * operation. The atomicity guarantees that the new value is calculated based on
00285  * up-to-date information; if the value had been updated by another thread in
00286  * the meantime, the write would fail due to a mismatched expectedCurrentValue.
00287  *
00288  * Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect
00289  * you to the article on compare-and-swap].
00290  *
00291  * @param  ptr                  The target memory location.
00292  * @param[in,out] expectedCurrentValue A pointer to some location holding the
00293  *                              expected current value of the data being set atomically.
00294  *                              The computed 'desiredValue' should be a function of this current value.
00295  *                              @note: This is an in-out parameter. In the
00296  *                              failure case of atomic_cas (where the
00297  *                              destination isn't set), the pointee of expectedCurrentValue is
00298  *                              updated with the current value.
00299  * @param[in] desiredValue      The new value computed based on '*expectedCurrentValue'.
00300  *
00301  * @return                      true if the memory location was atomically
00302  *                              updated with the desired value (after verifying
00303  *                              that it contained the expectedCurrentValue),
00304  *                              false otherwise. In the failure case,
00305  *                              expectedCurrentValue is updated with the new
00306  *                              value of the target memory location.
00307  *
00308  * pseudocode:
00309  * function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
00310  *     if *p != *old or spurious failure {
00311  *         *old = *p
00312  *         return false
00313  *     }
00314  *     *p = new
00315  *     return true
00316  * }
00317  *
00318  * @note: In the failure case (where the destination isn't set), the value
00319  * pointed to by expectedCurrentValue is instead updated with the current value.
00320  * This property helps with writing concise code, as in the following incr:
00321  *
00322  * function incr(p : pointer to int, a : int) returns int {
00323  *     done = false
00324  *     value = *p // This fetch operation need not be atomic.
00325  *     while not done {
00326  *         done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success
00327  *     }
00328  *     return value + a
00329  * }
00330  *
00331  * @note: This corresponds to the C11 "atomic_compare_exchange_weak" - it
00332  * may spuriously fail even if the current value is as expected, as per the
00333  * pseudocode above, unlike "atomic_compare_exchange_strong".
00334  * This call would normally be used when a fail return will cause a retry anyway,
00335  * saving the need for an extra loop inside the cas operation.
00336  */
00337 MBED_INLINE_IF_EX bool core_util_atomic_compare_exchange_weak_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue);
00338 
00339 /** \copydoc core_util_atomic_compare_exchange_weak_u8
00340  * @param success memory ordering constraint for successful exchange
00341  * @param failure memory ordering constraint for failure
00342  */
00343 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u8 (volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
00344 
00345 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
00346 MBED_INLINE_IF_EX bool core_util_atomic_compare_exchange_weak_u16 (volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue);
00347 
00348 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
00349 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u16 (volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
00350 
00351 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
00352 MBED_INLINE_IF_EX bool core_util_atomic_compare_exchange_weak_u32 (volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue);
00353 
00354 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
00355 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u32 (volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
00356 
00357 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
00358 bool core_util_atomic_compare_exchange_weak_u64 (volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue);
00359 
00360 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
00361 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u64 (volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
00362 
00363 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
00364 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s8 (volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue);
00365 
00366 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
00367 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s8 (volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
00368 
00369 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
00370 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s16 (volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue);
00371 
00372 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
00373 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s16 (volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
00374 
00375 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
00376 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s32 (volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue);
00377 
00378 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
00379 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s32 (volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
00380 
00381 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
00382 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s64 (volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue);
00383 
00384 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
00385 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s64 (volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
00386 
00387 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
00388 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_bool (volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue);
00389 
00390 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
00391 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_bool (volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue, mbed_memory_order success, mbed_memory_order failure);
00392 
00393 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
00394 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_ptr (void *volatile *ptr, void **expectedCurrentValue, void *desiredValue);
00395 
00396 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
00397 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_ptr (void *volatile *ptr, void **expectedCurrentValue, void *desiredValue, mbed_memory_order success, mbed_memory_order failure);
00398 
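/* Example: a minimal sketch of the retry pattern the weak form is intended
 * for - recording a running maximum in a hypothetical statistics word:
 * ~~~
 *     void update_max(volatile uint32_t *stat_max, uint32_t sample)
 *     {
 *         uint32_t seen = core_util_atomic_load_u32(stat_max);
 *         while (sample > seen &&
 *                !core_util_atomic_compare_exchange_weak_u32(stat_max, &seen, sample)) {
 *             // `seen` was refreshed; the loop also absorbs spurious failures
 *         }
 *     }
 * ~~~
 */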
00399 
00400 /**
00401  * Atomic load.
00402  * @param  valuePtr Target memory location.
00403  * @return          The loaded value.
00404  */
00405 MBED_FORCEINLINE uint8_t core_util_atomic_load_u8(const volatile uint8_t *valuePtr);
00406 
00407 /**
00408  * \copydoc core_util_atomic_load_u8
00409  * @param order memory ordering constraint
00410  */
00411 MBED_FORCEINLINE uint8_t core_util_atomic_load_explicit_u8 (const volatile uint8_t *valuePtr, mbed_memory_order order);
00412 
00413 /** \copydoc core_util_atomic_load_u8 */
00414 MBED_FORCEINLINE uint16_t core_util_atomic_load_u16 (const volatile uint16_t *valuePtr);
00415 
00416 /** \copydoc core_util_atomic_load_explicit_u8 */
00417 MBED_FORCEINLINE uint16_t core_util_atomic_load_explicit_u16 (const volatile uint16_t *valuePtr, mbed_memory_order order);
00418 
00419 /** \copydoc core_util_atomic_load_u8 */
00420 MBED_FORCEINLINE uint32_t core_util_atomic_load_u32 (const volatile uint32_t *valuePtr);
00421 
00422 /** \copydoc core_util_atomic_load_explicit_u8 */
00423 MBED_FORCEINLINE uint32_t core_util_atomic_load_explicit_u32 (const volatile uint32_t *valuePtr, mbed_memory_order order);
00424 
00425 /** \copydoc core_util_atomic_load_u8 */
00426 uint64_t core_util_atomic_load_u64 (const volatile uint64_t *valuePtr);
00427 
00428 /** \copydoc core_util_atomic_load_explicit_u8 */
00429 MBED_FORCEINLINE uint64_t core_util_atomic_load_explicit_u64 (const volatile uint64_t *valuePtr, mbed_memory_order order);
00430 
00431 /** \copydoc core_util_atomic_load_u8 */
00432 MBED_FORCEINLINE int8_t core_util_atomic_load_s8 (const volatile int8_t *valuePtr);
00433 
00434 /** \copydoc core_util_atomic_load_explicit_u8 */
00435 MBED_FORCEINLINE int8_t core_util_atomic_load_explicit_s8 (const volatile int8_t *valuePtr, mbed_memory_order order);
00436 
00437 /** \copydoc core_util_atomic_load_u8 */
00438 MBED_FORCEINLINE int16_t core_util_atomic_load_s16 (const volatile int16_t *valuePtr);
00439 
00440 /** \copydoc core_util_atomic_load_explicit_u8 */
00441 MBED_FORCEINLINE int16_t core_util_atomic_load_explicit_s16 (const volatile int16_t *valuePtr, mbed_memory_order order);
00442 
00443 /** \copydoc core_util_atomic_load_u8 */
00444 MBED_FORCEINLINE int32_t core_util_atomic_load_s32 (const volatile int32_t *valuePtr);
00445 
00446 /** \copydoc core_util_atomic_load_explicit_u8 */
00447 MBED_FORCEINLINE int32_t core_util_atomic_load_explicit_s32 (const volatile int32_t *valuePtr, mbed_memory_order order);
00448 
00449 /** \copydoc core_util_atomic_load_u8 */
00450 MBED_FORCEINLINE int64_t core_util_atomic_load_s64 (const volatile int64_t *valuePtr);
00451 
00452 /** \copydoc core_util_atomic_load_explicit_u8 */
00453 MBED_FORCEINLINE int64_t core_util_atomic_load_explicit_s64 (const volatile int64_t *valuePtr, MBED_UNUSED mbed_memory_order order);
00454 
00455 /** \copydoc core_util_atomic_load_u8 */
00456 MBED_FORCEINLINE bool core_util_atomic_load_bool (const volatile bool *valuePtr);
00457 
00458 /** \copydoc core_util_atomic_load_explicit_u8 */
00459 MBED_FORCEINLINE bool core_util_atomic_load_explicit_bool (const volatile bool *valuePtr, mbed_memory_order order);
00460 
00461 /** \copydoc core_util_atomic_load_u8 */
00462 MBED_FORCEINLINE void *core_util_atomic_load_ptr (void *const volatile *valuePtr);
00463 
00464 /** \copydoc core_util_atomic_load_explicit_u8 */
00465 MBED_FORCEINLINE void *core_util_atomic_load_explicit_ptr (void *const volatile *valuePtr, mbed_memory_order order);
00466 
00467 /**
00468  * Atomic store.
00469  * @param  valuePtr     Target memory location.
00470  * @param  desiredValue The value to store.
00471  */
00472 MBED_FORCEINLINE void core_util_atomic_store_u8(volatile uint8_t *valuePtr, uint8_t desiredValue);
00473 
00474 /**
00475  * \copydoc core_util_atomic_store_u8
00476  * @param order memory ordering constraint
00477  */
00478 MBED_FORCEINLINE void core_util_atomic_store_explicit_u8 (volatile uint8_t *valuePtr, uint8_t desiredValue, mbed_memory_order order);
00479 
00480 /** \copydoc core_util_atomic_store_u8 */
00481 MBED_FORCEINLINE void core_util_atomic_store_u16 (volatile uint16_t *valuePtr, uint16_t desiredValue);
00482 
00483 /** \copydoc core_util_atomic_store_explicit_u8 */
00484 MBED_FORCEINLINE void core_util_atomic_store_explicit_u16 (volatile uint16_t *valuePtr, uint16_t desiredValue, mbed_memory_order order);
00485 
00486 /** \copydoc core_util_atomic_store_u8 */
00487 MBED_FORCEINLINE void core_util_atomic_store_u32 (volatile uint32_t *valuePtr, uint32_t desiredValue);
00488 
00489 /** \copydoc core_util_atomic_store_explicit_u8 */
00490 MBED_FORCEINLINE void core_util_atomic_store_explicit_u32 (volatile uint32_t *valuePtr, uint32_t desiredValue, mbed_memory_order order);
00491 
00492 /** \copydoc core_util_atomic_store_u8 */
00493 void core_util_atomic_store_u64 (volatile uint64_t *valuePtr, uint64_t desiredValue);
00494 
00495 /** \copydoc core_util_atomic_store_explicit_u8 */
00496 MBED_FORCEINLINE void core_util_atomic_store_explicit_u64 (volatile uint64_t *valuePtr, uint64_t desiredValue, mbed_memory_order order);
00497 
00498 /** \copydoc core_util_atomic_store_u8 */
00499 MBED_FORCEINLINE void core_util_atomic_store_s8 (volatile int8_t *valuePtr, int8_t desiredValue);
00500 
00501 /** \copydoc core_util_atomic_store_explicit_u8 */
00502 MBED_FORCEINLINE void core_util_atomic_store_explicit_s8 (volatile int8_t *valuePtr, int8_t desiredValue, mbed_memory_order order);
00503 
00504 /** \copydoc core_util_atomic_store_u8 */
00505 MBED_FORCEINLINE void core_util_atomic_store_s16 (volatile int16_t *valuePtr, int16_t desiredValue);
00506 
00507 /** \copydoc core_util_atomic_store_explicit_u8 */
00508 MBED_FORCEINLINE void core_util_atomic_store_explicit_s16 (volatile int16_t *valuePtr, int16_t desiredValue, mbed_memory_order order);
00509 
00510 /** \copydoc core_util_atomic_store_u8 */
00511 MBED_FORCEINLINE void core_util_atomic_store_s32 (volatile int32_t *valuePtr, int32_t desiredValue);
00512 
00513 /** \copydoc core_util_atomic_store_explicit_u8 */
00514 MBED_FORCEINLINE void core_util_atomic_store_explicit_s32 (volatile int32_t *valuePtr, int32_t desiredValue, mbed_memory_order order);
00515 
00516 /** \copydoc core_util_atomic_store_u8 */
00517 MBED_FORCEINLINE void core_util_atomic_store_s64 (volatile int64_t *valuePtr, int64_t desiredValue);
00518 
00519 /** \copydoc core_util_atomic_store_explicit_u8 */
00520 MBED_FORCEINLINE void core_util_atomic_store_explicit_s64 (volatile int64_t *valuePtr, int64_t desiredValue, mbed_memory_order order);
00521 
00522 /** \copydoc core_util_atomic_store_u8 */
00523 MBED_FORCEINLINE void core_util_atomic_store_bool (volatile bool *valuePtr, bool desiredValue);
00524 
00525 /** \copydoc core_util_atomic_store_explicit_u8 */
00526 MBED_FORCEINLINE void core_util_atomic_store_explicit_bool (volatile bool *valuePtr, bool desiredValue, mbed_memory_order order);
00527 
00528 /** \copydoc core_util_atomic_store_u8 */
00529 MBED_FORCEINLINE void core_util_atomic_store_ptr (void *volatile *valuePtr, void *desiredValue);
00530 
00531 /** \copydoc core_util_atomic_store_explicit_u8 */
00532 MBED_FORCEINLINE void core_util_atomic_store_explicit_ptr (void *volatile *valuePtr, void *desiredValue, mbed_memory_order order);
00533 
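/* Example: a minimal publish/consume sketch pairing an explicit release store
 * with an acquire load (hypothetical payload produced in one context and read
 * in another):
 * ~~~
 *     static uint32_t message;
 *     static bool message_ready;
 *
 *     void publish(uint32_t value)
 *     {
 *         message = value;    // plain write to the payload
 *         core_util_atomic_store_explicit_bool(&message_ready, true, mbed_memory_order_release);
 *     }
 *
 *     bool try_consume(uint32_t *out)
 *     {
 *         if (!core_util_atomic_load_explicit_bool(&message_ready, mbed_memory_order_acquire)) {
 *             return false;
 *         }
 *         *out = message;     // ordered after the acquire load
 *         return true;
 *     }
 * ~~~
 */
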
00534 /**
00535  * Atomic exchange.
00536  * @param  valuePtr     Target memory location.
00537  * @param  desiredValue The value to store.
00538  * @return              The previous value.
00539  */
00540 MBED_INLINE_IF_EX uint8_t core_util_atomic_exchange_u8(volatile uint8_t *valuePtr, uint8_t desiredValue);
00541 
00542 /** \copydoc core_util_atomic_exchange_u8
00543  * @param order memory ordering constraint
00544  */
00545 MBED_FORCEINLINE uint8_t core_util_atomic_exchange_explicit_u8 (volatile uint8_t *valuePtr, uint8_t desiredValue, mbed_memory_order order);
00546 
00547 /** \copydoc core_util_atomic_exchange_u8 */
00548 MBED_INLINE_IF_EX uint16_t core_util_atomic_exchange_u16 (volatile uint16_t *valuePtr, uint16_t desiredValue);
00549 
00550 /** \copydoc core_util_atomic_exchange_explicit_u8 */
00551 MBED_FORCEINLINE uint16_t core_util_atomic_exchange_explicit_u16 (volatile uint16_t *valuePtr, uint16_t desiredValue, mbed_memory_order order);
00552 
00553 /** \copydoc core_util_atomic_exchange_u8 */
00554 MBED_INLINE_IF_EX uint32_t core_util_atomic_exchange_u32 (volatile uint32_t *valuePtr, uint32_t desiredValue);
00555 
00556 /** \copydoc core_util_atomic_exchange_explicit_u8 */
00557 MBED_FORCEINLINE uint32_t core_util_atomic_exchange_explicit_u32 (volatile uint32_t *valuePtr, uint32_t desiredValue, mbed_memory_order order);
00558 
00559 /** \copydoc core_util_atomic_exchange_u8 */
00560 uint64_t core_util_atomic_exchange_u64 (volatile uint64_t *valuePtr, uint64_t desiredValue);
00561 
00562 /** \copydoc core_util_atomic_exchange_explicit_u8 */
00563 MBED_FORCEINLINE uint64_t core_util_atomic_exchange_explicit_u64 (volatile uint64_t *valuePtr, uint64_t desiredValue, mbed_memory_order order);
00564 
00565 /** \copydoc core_util_atomic_exchange_u8 */
00566 MBED_FORCEINLINE int8_t core_util_atomic_exchange_s8 (volatile int8_t *valuePtr, int8_t desiredValue);
00567 
00568 /** \copydoc core_util_atomic_exchange_explicit_u8 */
00569 MBED_FORCEINLINE int8_t core_util_atomic_exchange_explicit_s8 (volatile int8_t *valuePtr, int8_t desiredValue, mbed_memory_order order);
00570 
00571 /** \copydoc core_util_atomic_exchange_u8 */
00572 MBED_FORCEINLINE int16_t core_util_atomic_exchange_s16 (volatile int16_t *valuePtr, int16_t desiredValue);
00573 
00574 /** \copydoc core_util_atomic_exchange_explicit_u8 */
00575 MBED_FORCEINLINE int16_t core_util_atomic_exchange_explicit_s16 (volatile int16_t *valuePtr, int16_t desiredValue, mbed_memory_order order);
00576 
00577 /** \copydoc core_util_atomic_exchange_u8 */
00578 MBED_FORCEINLINE int32_t core_util_atomic_exchange_s32 (volatile int32_t *valuePtr, int32_t desiredValue);
00579 
00580 /** \copydoc core_util_atomic_exchange_explicit_u8 */
00581 MBED_FORCEINLINE int32_t core_util_atomic_exchange_explicit_s32 (volatile int32_t *valuePtr, int32_t desiredValue, mbed_memory_order order);
00582 
00583 /** \copydoc core_util_atomic_exchange_u8 */
00584 MBED_FORCEINLINE int64_t core_util_atomic_exchange_s64 (volatile int64_t *valuePtr, int64_t desiredValue);
00585 
00586 /** \copydoc core_util_atomic_exchange_explicit_u8 */
00587 MBED_FORCEINLINE int64_t core_util_atomic_exchange_explicit_s64 (volatile int64_t *valuePtr, int64_t desiredValue, mbed_memory_order order);
00588 
00589 /** \copydoc core_util_atomic_exchange_u8 */
00590 MBED_FORCEINLINE bool core_util_atomic_exchange_bool (volatile bool *valuePtr, bool desiredValue);
00591 
00592 /** \copydoc core_util_atomic_exchange_explicit_u8 */
00593 MBED_FORCEINLINE bool core_util_atomic_exchange_explicit_bool (volatile bool *valuePtr, bool desiredValue, mbed_memory_order order);
00594 
00595 /** \copydoc core_util_atomic_exchange_u8 */
00596 inline void *core_util_atomic_exchange_ptr (void *volatile *valuePtr, void *desiredValue);
00597 
00598 /** \copydoc core_util_atomic_exchange_explicit_u8 */
00599 MBED_FORCEINLINE void *core_util_atomic_exchange_explicit_ptr (void *volatile *valuePtr, void *desiredValue, mbed_memory_order order);
00600 
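/* Example: a minimal sketch using exchange to take ownership of a pending
 * item exactly once (hypothetical buffer pointer set by an interrupt handler
 * and consumed in thread context):
 * ~~~
 *     static void *volatile pending_buf;
 *
 *     void *take_pending(void)
 *     {
 *         // swap in NULL so only one caller ever receives the buffer
 *         return core_util_atomic_exchange_ptr(&pending_buf, NULL);
 *     }
 * ~~~
 */
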
00601 /**
00602  * Atomic increment.
00603  * @param  valuePtr Target memory location being incremented.
00604  * @param  delta    The amount being incremented.
00605  * @return          The new incremented value.
00606  */
00607 MBED_INLINE_IF_EX uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta);
00608 
00609 /** \copydoc core_util_atomic_incr_u8 */
00610 MBED_INLINE_IF_EX uint16_t core_util_atomic_incr_u16 (volatile uint16_t *valuePtr, uint16_t delta);
00611 
00612 /** \copydoc core_util_atomic_incr_u8 */
00613 MBED_INLINE_IF_EX uint32_t core_util_atomic_incr_u32 (volatile uint32_t *valuePtr, uint32_t delta);
00614 
00615 /** \copydoc core_util_atomic_incr_u8 */
00616 uint64_t core_util_atomic_incr_u64 (volatile uint64_t *valuePtr, uint64_t delta);
00617 
00618 /** \copydoc core_util_atomic_incr_u8 */
00619 MBED_FORCEINLINE int8_t core_util_atomic_incr_s8 (volatile int8_t *valuePtr, int8_t delta);
00620 
00621 /** \copydoc core_util_atomic_incr_u8 */
00622 MBED_FORCEINLINE int16_t core_util_atomic_incr_s16 (volatile int16_t *valuePtr, int16_t delta);
00623 
00624 /** \copydoc core_util_atomic_incr_u8 */
00625 MBED_FORCEINLINE int32_t core_util_atomic_incr_s32 (volatile int32_t *valuePtr, int32_t delta);
00626 
00627 /** \copydoc core_util_atomic_incr_u8 */
00628 MBED_FORCEINLINE int64_t core_util_atomic_incr_s64 (volatile int64_t *valuePtr, int64_t delta);
00629 
00630 /** \copydoc core_util_atomic_incr_u8 */
00631 inline void *core_util_atomic_incr_ptr (void *volatile *valuePtr, ptrdiff_t delta);
00632 
00633 /**
00634  * Atomic decrement.
00635  * @param  valuePtr Target memory location being decremented.
00636  * @param  delta    The amount being decremented.
00637  * @return          The new decremented value.
00638  */
00639 MBED_INLINE_IF_EX uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta);
00640 
00641 /** \copydoc core_util_atomic_decr_u8 */
00642 MBED_INLINE_IF_EX uint16_t core_util_atomic_decr_u16 (volatile uint16_t *valuePtr, uint16_t delta);
00643 
00644 /** \copydoc core_util_atomic_decr_u8 */
00645 MBED_INLINE_IF_EX uint32_t core_util_atomic_decr_u32 (volatile uint32_t *valuePtr, uint32_t delta);
00646 
00647 /** \copydoc core_util_atomic_decr_u8 */
00648 uint64_t core_util_atomic_decr_u64 (volatile uint64_t *valuePtr, uint64_t delta);
00649 
00650 /** \copydoc core_util_atomic_decr_u8 */
00651 MBED_FORCEINLINE int8_t core_util_atomic_decr_s8 (volatile int8_t *valuePtr, int8_t delta);
00652 
00653 /** \copydoc core_util_atomic_decr_u8 */
00654 MBED_FORCEINLINE int16_t core_util_atomic_decr_s16 (volatile int16_t *valuePtr, int16_t delta);
00655 
00656 /** \copydoc core_util_atomic_decr_u8 */
00657 MBED_FORCEINLINE int32_t core_util_atomic_decr_s32 (volatile int32_t *valuePtr, int32_t delta);
00658 
00659 /** \copydoc core_util_atomic_decr_u8 */
00660 MBED_FORCEINLINE int64_t core_util_atomic_decr_s64 (volatile int64_t *valuePtr, int64_t delta);
00661 
00662 /** \copydoc core_util_atomic_decr_u8 */
00663 inline void *core_util_atomic_decr_ptr (void *volatile *valuePtr, ptrdiff_t delta);
00664 
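/* Example: a minimal reference-count sketch - incr and decr return the *new*
 * value, so whichever caller sees the count reach zero performs the cleanup
 * (hypothetical `refs` field):
 * ~~~
 *     void obj_retain(volatile uint32_t *refs)
 *     {
 *         core_util_atomic_incr_u32(refs, 1);
 *     }
 *
 *     // returns true when the last reference has just been dropped
 *     bool obj_release(volatile uint32_t *refs)
 *     {
 *         return core_util_atomic_decr_u32(refs, 1) == 0;
 *     }
 * ~~~
 */
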
00665 /**
00666  * Atomic add.
00667  * @param  valuePtr Target memory location being modified.
00668  * @param  arg      The argument for the addition.
00669  * @return          The original value.
00670  */
00671 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_add_u8(volatile uint8_t *valuePtr, uint8_t arg);
00672 
00673 /** \copydoc core_util_atomic_fetch_add_u8
00674  * @param order memory ordering constraint
00675  */
00676 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_add_explicit_u8 (volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
00677 
00678 /** \copydoc core_util_atomic_fetch_add_u8 */
00679 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_add_u16 (volatile uint16_t *valuePtr, uint16_t arg);
00680 
00681 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
00682 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_add_explicit_u16 (volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
00683 
00684 /** \copydoc core_util_atomic_fetch_add_u8 */
00685 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_add_u32 (volatile uint32_t *valuePtr, uint32_t arg);
00686 
00687 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
00688 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_add_explicit_u32 (volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
00689 
00690 /** \copydoc core_util_atomic_fetch_add_u8 */
00691 uint64_t core_util_atomic_fetch_add_u64 (volatile uint64_t *valuePtr, uint64_t arg);
00692 
00693 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
00694 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_add_explicit_u64 (volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
00695 
00696 /** \copydoc core_util_atomic_fetch_add_u8 */
00697 MBED_FORCEINLINE int8_t core_util_atomic_fetch_add_s8 (volatile int8_t *valuePtr, int8_t arg);
00698 
00699 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
00700 MBED_FORCEINLINE int8_t core_util_atomic_fetch_add_explicit_s8 (volatile int8_t *valuePtr, int8_t arg, mbed_memory_order order);
00701 
00702 /** \copydoc core_util_atomic_fetch_add_u8 */
00703 MBED_FORCEINLINE int16_t core_util_atomic_fetch_add_s16 (volatile int16_t *valuePtr, int16_t arg);
00704 
00705 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
00706 MBED_FORCEINLINE int16_t core_util_atomic_fetch_add_explicit_s16 (volatile int16_t *valuePtr, int16_t arg, mbed_memory_order order);
00707 
00708 /** \copydoc core_util_atomic_fetch_add_u8 */
00709 MBED_FORCEINLINE int32_t core_util_atomic_fetch_add_s32 (volatile int32_t *valuePtr, int32_t arg);
00710 
00711 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
00712 MBED_FORCEINLINE int32_t core_util_atomic_fetch_add_explicit_s32 (volatile int32_t *valuePtr, int32_t arg, mbed_memory_order order);
00713 
00714 /** \copydoc core_util_atomic_fetch_add_u8 */
00715 MBED_FORCEINLINE int64_t core_util_atomic_fetch_add_s64 (volatile int64_t *valuePtr, int64_t arg);
00716 
00717 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
00718 MBED_FORCEINLINE int64_t core_util_atomic_fetch_add_explicit_s64 (volatile int64_t *valuePtr, int64_t arg, mbed_memory_order order);
00719 
00720 /** \copydoc core_util_atomic_fetch_add_u8 */
00721 MBED_FORCEINLINE void *core_util_atomic_fetch_add_ptr (void *volatile *valuePtr, ptrdiff_t arg);
00722 
00723 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
00724 MBED_FORCEINLINE void *core_util_atomic_fetch_add_explicit_ptr (void *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order);
00725 
00726 /**
00727  * Atomic subtract.
00728  * @param  valuePtr Target memory location being modified.
00729  * @param  arg      The argument for the subtraction.
00730  * @return          The original value.
00731  */
00732 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_sub_u8(volatile uint8_t *valuePtr, uint8_t arg);
00733 
00734 /** \copydoc core_util_atomic_fetch_sub_u8
00735  * @param order memory ordering constraint
00736  */
00737 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_sub_explicit_u8 (volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
00738 
00739 /** \copydoc core_util_atomic_fetch_sub_u8 */
00740 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_sub_u16 (volatile uint16_t *valuePtr, uint16_t arg);
00741 
00742 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
00743 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_sub_explicit_u16 (volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
00744 
00745 /** \copydoc core_util_atomic_fetch_sub_u8 */
00746 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_sub_u32 (volatile uint32_t *valuePtr, uint32_t arg);
00747 
00748 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
00749 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_sub_explicit_u32 (volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
00750 
00751 /** \copydoc core_util_atomic_fetch_sub_u8 */
00752 uint64_t core_util_atomic_fetch_sub_u64 (volatile uint64_t *valuePtr, uint64_t arg);
00753 
00754 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
00755 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_sub_explicit_u64 (volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
00756 
00757 /** \copydoc core_util_atomic_fetch_sub_u8 */
00758 MBED_FORCEINLINE int8_t core_util_atomic_fetch_sub_s8 (volatile int8_t *valuePtr, int8_t arg);
00759 
00760 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
00761 MBED_FORCEINLINE int8_t core_util_atomic_fetch_sub_explicit_s8 (volatile int8_t *valuePtr, int8_t arg, mbed_memory_order order);
00762 
00763 /** \copydoc core_util_atomic_fetch_sub_u8 */
00764 MBED_FORCEINLINE int16_t core_util_atomic_fetch_sub_s16 (volatile int16_t *valuePtr, int16_t arg);
00765 
00766 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
00767 MBED_FORCEINLINE int16_t core_util_atomic_fetch_sub_explicit_s16 (volatile int16_t *valuePtr, int16_t arg, mbed_memory_order order);
00768 
00769 /** \copydoc core_util_atomic_fetch_sub_u8 */
00770 MBED_FORCEINLINE int32_t core_util_atomic_fetch_sub_s32 (volatile int32_t *valuePtr, int32_t arg);
00771 
00772 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
00773 MBED_FORCEINLINE int32_t core_util_atomic_fetch_sub_explicit_s32 (volatile int32_t *valuePtr, int32_t arg, mbed_memory_order order);
00774 
00775 /** \copydoc core_util_atomic_fetch_sub_u8 */
00776 MBED_FORCEINLINE int64_t core_util_atomic_fetch_sub_s64 (volatile int64_t *valuePtr, int64_t arg);
00777 
00778 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
00779 MBED_FORCEINLINE int64_t core_util_atomic_fetch_sub_explicit_s64 (volatile int64_t *valuePtr, int64_t arg, mbed_memory_order order);
00780 
00781 /** \copydoc core_util_atomic_fetch_sub_u8 */
00782 MBED_FORCEINLINE void *core_util_atomic_fetch_sub_ptr (void *volatile *valuePtr, ptrdiff_t arg);
00783 
00784 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
00785 MBED_FORCEINLINE void *core_util_atomic_fetch_sub_explicit_ptr (void *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order);
00786 
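/* Example: unlike incr/decr, the fetch_ operations return the *original*
 * value, which is what is needed when handing out unique sequence numbers
 * (hypothetical `next_id` counter):
 * ~~~
 *     static uint32_t next_id;
 *
 *     uint32_t allocate_id(void)
 *     {
 *         // each caller receives a distinct value: 0, 1, 2, ...
 *         return core_util_atomic_fetch_add_u32(&next_id, 1);
 *     }
 * ~~~
 */
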
00787 /**
00788  * Atomic bitwise and.
00789  * @param  valuePtr Target memory location being modified.
00790  * @param  arg      The argument for the bitwise operation.
00791  * @return          The original value.
00792  */
00793 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_and_u8(volatile uint8_t *valuePtr, uint8_t arg);
00794 
00795 /** \copydoc core_util_atomic_fetch_and_u8
00796  * @param order memory ordering constraint
00797  */
00798 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_and_explicit_u8 (volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
00799 
00800 /** \copydoc core_util_atomic_fetch_and_u8 */
00801 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_and_u16 (volatile uint16_t *valuePtr, uint16_t arg);
00802 
00803 /** \copydoc core_util_atomic_fetch_and_explicit_u8 */
00804 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_and_explicit_u16 (volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
00805 
00806 /** \copydoc core_util_atomic_fetch_and_u8 */
00807 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_and_u32 (volatile uint32_t *valuePtr, uint32_t arg);
00808 
00809 /** \copydoc core_util_atomic_fetch_and_explicit_u8 */
00810 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_and_explicit_u32 (volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
00811 
00812 /** \copydoc core_util_atomic_fetch_and_u8 */
00813 uint64_t core_util_atomic_fetch_and_u64 (volatile uint64_t *valuePtr, uint64_t arg);
00814 
00815 /** \copydoc core_util_atomic_fetch_and_explicit_u8 */
00816 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_and_explicit_u64 (volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
00817 
00818 /**
00819  * Atomic bitwise inclusive or.
00820  * @param  valuePtr Target memory location being modified.
00821  * @param  arg      The argument for the bitwise operation.
00822  * @return          The original value.
00823  */
00824 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_or_u8(volatile uint8_t *valuePtr, uint8_t arg);
00825 
00826 /** \copydoc core_util_atomic_fetch_or_u8
00827  * @param order memory ordering constraint
00828  */
00829 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_or_explicit_u8 (volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
00830 
00831 /** \copydoc core_util_atomic_fetch_or_u8 */
00832 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_or_u16 (volatile uint16_t *valuePtr, uint16_t arg);
00833 
00834 /** \copydoc core_util_atomic_fetch_or_explicit_u8 */
00835 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_or_explicit_u16 (volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
00836 
00837 /** \copydoc core_util_atomic_fetch_or_u8 */
00838 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_or_u32 (volatile uint32_t *valuePtr, uint32_t arg);
00839 
00840 /** \copydoc core_util_atomic_fetch_or_explicit_u8 */
00841 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_or_explicit_u32 (volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
00842 
00843 /** \copydoc core_util_atomic_fetch_or_u8 */
00844 uint64_t core_util_atomic_fetch_or_u64 (volatile uint64_t *valuePtr, uint64_t arg);
00845 
00846 /** \copydoc core_util_atomic_fetch_or_explicit_u8 */
00847 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_or_explicit_u64 (volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
00848 
00849 /**
00850  * Atomic bitwise exclusive or.
00851  * @param  valuePtr Target memory location being modified.
00852  * @param  arg      The argument for the bitwise operation.
00853  * @return          The original value.
00854  */
00855 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_xor_u8(volatile uint8_t *valuePtr, uint8_t arg);
00856 
00857 /** \copydoc core_util_atomic_fetch_xor_u8
00858  * @param order memory ordering constraint
00859  */
00860 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_xor_explicit_u8 (volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
00861 
00862 /** \copydoc core_util_atomic_fetch_xor_u8 */
00863 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_xor_u16 (volatile uint16_t *valuePtr, uint16_t arg);
00864 
00865 /** \copydoc core_util_atomic_fetch_xor_explicit_u8 */
00866 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_xor_explicit_u16 (volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
00867 
00868 /** \copydoc core_util_atomic_fetch_xor_u8 */
00869 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_xor_u32 (volatile uint32_t *valuePtr, uint32_t arg);
00870 
00871 /** \copydoc core_util_atomic_fetch_xor_explicit_u8 */
00872 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_xor_explicit_u32 (volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
00873 
00874 /** \copydoc core_util_atomic_fetch_xor_u8 */
00875 uint64_t core_util_atomic_fetch_xor_u64 (volatile uint64_t *valuePtr, uint64_t arg);
00876 
00877 /** \copydoc core_util_atomic_fetch_xor_explicit_u8 */
00878 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_xor_explicit_u64 (volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
00879 
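/* Example: a minimal event-mask sketch - fetch_or sets bits, fetch_and clears
 * them, and the returned previous value reports which bits were already set
 * (hypothetical 8-bit mask shared with an interrupt handler):
 * ~~~
 *     static uint8_t events;
 *
 *     void signal_event(uint8_t bit)
 *     {
 *         core_util_atomic_fetch_or_u8(&events, bit);
 *     }
 *
 *     bool take_event(uint8_t bit)
 *     {
 *         // clear the bit and report whether it was pending
 *         return (core_util_atomic_fetch_and_u8(&events, (uint8_t)~bit) & bit) != 0;
 *     }
 * ~~~
 */
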
00880 #ifdef __cplusplus
00881 } // extern "C"
00882 
00883 #include <mstd_type_traits>
00884 
00885 // For each operation, two overloaded templates:
00886 // * one for non-pointer types, which has implementations based on the
00887 //   u8/u16/u32/u64/s8/s16/s32/s64/bool functions above. No base implementation.
00888 // * one for any pointer type, generically implemented based on ptr function above.
00889 //
00890 // Templates use standard C/C++ naming - old incr/decr/cas forms are not provided.
00891 //
00892 // The `type_identity_t<T>` used here means "same type as T", blocking template
00893 // argument deduction. It forces type selection based on the type of the actual pointer
00894 // to the atomic. If just `T` was used, the following would be ambiguous:
00895 // core_util_atomic_store(&my_uint8_t, 1) - it wouldn't be able to select between T
00896 // being uint8_t and int.
00897 
00898 /** \copydoc core_util_atomic_load_u8 */
00899 template<typename T> T core_util_atomic_load (const volatile T *valuePtr) noexcept;
00900 /** \copydoc core_util_atomic_load_u8 */
00901 template<typename T> T core_util_atomic_load (const T *valuePtr) noexcept;
00902 /** \copydoc core_util_atomic_store_u8 */
00903 template<typename T> void core_util_atomic_store (volatile T *valuePtr, mstd::type_identity_t<T> desiredValue) noexcept;
00904 /** \copydoc core_util_atomic_store_u8 */
00905 template<typename T> void core_util_atomic_store (T *valuePtr, mstd::type_identity_t<T> desiredValue) noexcept;
00906 /** \copydoc core_util_atomic_exchange_u8 */
00907 template<typename T> T core_util_atomic_exchange (volatile T *ptr, mstd::type_identity_t<T> desiredValue) noexcept;
00908 /** \copydoc core_util_atomic_cas_u8 */
00909 template<typename T> bool core_util_atomic_compare_exchange_strong (volatile T *ptr, mstd::type_identity_t<T> *expectedCurrentValue, mstd::type_identity_t<T> desiredValue) noexcept;
00910 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
00911 template<typename T> bool core_util_atomic_compare_exchange_weak (volatile T *ptr, mstd::type_identity_t<T> *expectedCurrentValue, mstd::type_identity_t<T> desiredValue) noexcept;
00912 /** \copydoc core_util_atomic_fetch_add_u8 */
00913 template<typename T> T core_util_atomic_fetch_add (volatile T *valuePtr, mstd::type_identity_t<T> arg) noexcept;
00914 /** \copydoc core_util_atomic_fetch_sub_u8 */
00915 template<typename T> T core_util_atomic_fetch_sub (volatile T *valuePtr, mstd::type_identity_t<T> arg) noexcept;
00916 /** \copydoc core_util_atomic_fetch_and_u8 */
00917 template<typename T> T core_util_atomic_fetch_and (volatile T *valuePtr, mstd::type_identity_t<T> arg) noexcept;
00918 /** \copydoc core_util_atomic_fetch_or_u8 */
00919 template<typename T> T core_util_atomic_fetch_or (volatile T *valuePtr, mstd::type_identity_t<T> arg) noexcept;
00920 /** \copydoc core_util_atomic_fetch_xor_u8 */
00921 template<typename T> T core_util_atomic_fetch_xor (volatile T *valuePtr, mstd::type_identity_t<T> arg) noexcept;
00922 
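// Example: a minimal sketch of the template forms above - the argument type
// is forced to match the atomic object, so integer literals work without
// deduction ambiguity (hypothetical counters):
//
//     uint8_t small_counter = 0;
//     uint32_t big_counter = 0;
//
//     core_util_atomic_store(&small_counter, 1);                    // T deduced as uint8_t
//     uint32_t old = core_util_atomic_fetch_add(&big_counter, 10);  // T deduced as uint32_t
//
//     if (!core_util_atomic_compare_exchange_strong(&big_counter, &old, old + 1)) {
//         // `old` now holds the latest value of big_counter
//     }
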
00923 /** \copydoc core_util_atomic_load_explicit_u8 */
00924 template<typename T> T core_util_atomic_load_explicit (const volatile T *valuePtr, mbed_memory_order order) noexcept;
00925 /** \copydoc core_util_atomic_load_explicit_u8 */
00926 template<typename T> T core_util_atomic_load_explicit (const T *valuePtr, mbed_memory_order order) noexcept;
00927 /** \copydoc core_util_atomic_store_explicit_u8 */
00928 template<typename T> void core_util_atomic_store_explicit (volatile T *valuePtr, mstd::type_identity_t<T> desiredValue, mbed_memory_order order) noexcept;
00929 /** \copydoc core_util_atomic_store_explicit_u8 */
00930 template<typename T> void core_util_atomic_store_explicit (T *valuePtr, mstd::type_identity_t<T> desiredValue, mbed_memory_order order) noexcept;
00931 /** \copydoc core_util_atomic_exchange_explicit_u8 */
00932 template<typename T> T core_util_atomic_exchange_explicit (volatile T *ptr, mstd::type_identity_t<T> desiredValue, mbed_memory_order order) noexcept;
00933 /** \copydoc core_util_atomic_cas_explicit_u8 */
00934 template<typename T> bool core_util_atomic_compare_exchange_strong_explicit (volatile T *ptr, mstd::type_identity_t<T> *expectedCurrentValue, mstd::type_identity_t<T> desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept;
00935 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
00936 template<typename T> bool core_util_atomic_compare_exchange_weak_explicit (volatile T *ptr, mstd::type_identity_t<T> *expectedCurrentValue, mstd::type_identity_t<T> desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept;
00937 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
00938 template<typename T> T core_util_atomic_fetch_add_explicit (volatile T *valuePtr, mstd::type_identity_t<T> arg, mbed_memory_order order) noexcept;
00939 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
00940 template<typename T> T core_util_atomic_fetch_sub_explicit (volatile T *valuePtr, mstd::type_identity_t<T> arg, mbed_memory_order order) noexcept;
00941 /** \copydoc core_util_atomic_fetch_and_explicit_u8 */
00942 template<typename T> T core_util_atomic_fetch_and_explicit (volatile T *valuePtr, mstd::type_identity_t<T> arg, mbed_memory_order order) noexcept;
00943 /** \copydoc core_util_atomic_fetch_or_explicit_u8 */
00944 template<typename T> T core_util_atomic_fetch_or_explicit (volatile T *valuePtr, mstd::type_identity_t<T> arg, mbed_memory_order order) noexcept;
00945 /** \copydoc core_util_atomic_fetch_xor_explicit_u8 */
00946 template<typename T> T core_util_atomic_fetch_xor_explicit (volatile T *valuePtr, mstd::type_identity_t<T> arg, mbed_memory_order order) noexcept;
00947 
00948 /** \copydoc core_util_atomic_load_ptr */
00949 template<typename T> inline T *core_util_atomic_load (T *const volatile *valuePtr) noexcept;
00950 /** \copydoc core_util_atomic_load_ptr */
00951 template<typename T> inline T *core_util_atomic_load (T *const *valuePtr) noexcept;
00952 /** \copydoc core_util_atomic_store_ptr */
00953 template<typename T> inline void core_util_atomic_store (T *volatile *valuePtr, mstd::type_identity_t<T> *desiredValue) noexcept;
00954 /** \copydoc core_util_atomic_store_ptr */
00955 template<typename T> inline void core_util_atomic_store (T **valuePtr, mstd::type_identity_t<T> *desiredValue) noexcept;
00956 /** \copydoc core_util_atomic_exchange_ptr */
00957 template<typename T> inline T *core_util_atomic_exchange (T *volatile *valuePtr, mstd::type_identity_t<T> *desiredValue) noexcept;
00958 /** \copydoc core_util_atomic_cas_ptr */
00959 template<typename T> inline bool core_util_atomic_compare_exchange_strong (T *volatile *ptr, mstd::type_identity_t<T> **expectedCurrentValue, mstd::type_identity_t<T> *desiredValue) noexcept;
00960 /** \copydoc core_util_atomic_compare_exchange_weak_ptr */
00961 template<typename T> inline bool core_util_atomic_compare_exchange_weak (T *volatile *ptr, mstd::type_identity_t<T> **expectedCurrentValue, mstd::type_identity_t<T> *desiredValue) noexcept;
00962 /** \copydoc core_util_atomic_fetch_add_ptr */
00963 template<typename T> inline T *core_util_atomic_fetch_add (T *volatile *valuePtr, ptrdiff_t arg) noexcept;
00964 /** \copydoc core_util_atomic_fetch_sub_ptr */
00965 template<typename T> inline T *core_util_atomic_fetch_sub (T *volatile *valuePtr, ptrdiff_t arg) noexcept;
00966 
00967 /** \copydoc core_util_atomic_load_explicit_ptr */
00968 template<typename T> inline T *core_util_atomic_load_explicit (T *const volatile *valuePtr, mbed_memory_order order) noexcept;
00969 /** \copydoc core_util_atomic_load_explicit_ptr */
00970 template<typename T> inline T *core_util_atomic_load_explicit (T *const *valuePtr, mbed_memory_order order) noexcept;
00971 /** \copydoc core_util_atomic_store_explicit_ptr */
00972 template<typename T> inline void core_util_atomic_store_explicit (T *volatile *valuePtr, mstd::type_identity_t<T> *desiredValue, mbed_memory_order order) noexcept;
00973 /** \copydoc core_util_atomic_store_explicit_ptr */
00974 template<typename T> inline void core_util_atomic_store_explicit (T **valuePtr, mstd::type_identity_t<T> *desiredValue, mbed_memory_order order) noexcept;
00975 /** \copydoc core_util_atomic_exchange_explicit_ptr */
00976 template<typename T> inline T *core_util_atomic_exchange_explicit (T *volatile *valuePtr, mstd::type_identity_t<T> *desiredValue, mbed_memory_order order) noexcept;
00977 /** \copydoc core_util_atomic_cas_explicit_ptr */
00978 template<typename T> inline bool core_util_atomic_compare_exchange_strong_explicit (T *volatile *ptr, mstd::type_identity_t<T> **expectedCurrentValue, mstd::type_identity_t<T> *desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept;
00979 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_ptr */
00980 template<typename T> inline bool core_util_atomic_compare_exchange_weak_explicit (T *volatile *ptr, mstd::type_identity_t<T> **expectedCurrentValue, mstd::type_identity_t<T> *desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept;
00981 /** \copydoc core_util_atomic_fetch_add_explicit_ptr */
00982 template<typename T> inline T *core_util_atomic_fetch_add_explicit (T *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order) noexcept;
00983 /** \copydoc core_util_atomic_fetch_sub_explicit_ptr */
00984 template<typename T> inline T *core_util_atomic_fetch_sub_explicit (T *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order) noexcept;
00985 
00986 #endif // __cplusplus
00987 
00988 /**@}*/
00989 
00990 /**@}*/
00991 
00992 /* Hide the implementation away */
00993 #include "platform/internal/mbed_atomic_impl.h"
00994 
00995 #endif // __MBED_UTIL_ATOMIC_H__
00996 
00997 
00998