Kenji Arai / mbed-os_TYBLE16

Dependents:   TYBLE16_simple_data_logger TYBLE16_MP3_Air

mbed_atomic_impl.c

/*
 * Copyright (c) 2019, ARM Limited, All Rights Reserved
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "platform/mbed_assert.h"
#include "platform/mbed_atomic.h"
#include "platform/mbed_critical.h"

/* Inline bool implementations in the header use uint8_t versions to manipulate the bool */
MBED_STATIC_ASSERT(sizeof(bool) == sizeof(uint8_t), "Surely bool is a byte");

/* Inline implementations in the header use uint32_t versions to manipulate pointers */
MBED_STATIC_ASSERT(sizeof(void *) == sizeof(uint32_t), "Alas, pointers must be 32-bit");


#define DO_MBED_LOCKED_OP(name, OP, retValue, T, fn_suffix)             \
T core_util_atomic_##name##_##fn_suffix(volatile T *valuePtr, T arg)    \
{                                                                       \
    T oldValue, newValue;                                               \
    core_util_critical_section_enter();                                 \
    oldValue = *valuePtr;                                               \
    newValue = OP;                                                      \
    *valuePtr = newValue;                                               \
    core_util_critical_section_exit();                                  \
    return retValue;                                                    \
}
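
/* For illustration: expanding
 * DO_MBED_LOCKED_OP(fetch_add, oldValue + arg, oldValue, uint64_t, u64)
 * (as instantiated further down) yields this function, which emulates the
 * atomic read-modify-write by performing a plain one inside a critical section:
 *
 *     uint64_t core_util_atomic_fetch_add_u64(volatile uint64_t *valuePtr, uint64_t arg)
 *     {
 *         uint64_t oldValue, newValue;
 *         core_util_critical_section_enter();
 *         oldValue = *valuePtr;
 *         newValue = oldValue + arg;
 *         *valuePtr = newValue;
 *         core_util_critical_section_exit();
 *         return oldValue;
 *     }
 */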

#define DO_MBED_LOCKED_CAS_OP(T, fn_suffix)                                                     \
bool core_util_atomic_cas_##fn_suffix(volatile T *ptr, T *expectedCurrentValue, T desiredValue) \
{                                                                                               \
    bool success;                                                                               \
    T currentValue;                                                                             \
    core_util_critical_section_enter();                                                         \
    currentValue = *ptr;                                                                        \
    if (currentValue == *expectedCurrentValue) {                                                \
        *ptr = desiredValue;                                                                    \
        success = true;                                                                         \
    } else {                                                                                    \
        *expectedCurrentValue = currentValue;                                                   \
        success = false;                                                                        \
    }                                                                                           \
    core_util_critical_section_exit();                                                          \
    return success;                                                                             \
}                                                                                               \
                                                                                                \
bool core_util_atomic_compare_exchange_weak_##fn_suffix(volatile T *ptr,                        \
        T *expectedCurrentValue, T desiredValue)                                                \
{                                                                                               \
    return core_util_atomic_cas_##fn_suffix(ptr, expectedCurrentValue, desiredValue);           \
}
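
/* Usage sketch (hypothetical caller, for illustration): the CAS functions
 * follow the C11 compare-exchange contract, so a lock-free update is
 * typically written as a retry loop:
 */
#if 0
static void example_atomic_double(volatile uint32_t *value)
{
    uint32_t expected = core_util_atomic_load_u32(value);
    /* On failure, expected is refreshed with the value actually seen, so the
     * next iteration recomputes the desired value from current data */
    while (!core_util_atomic_cas_u32(value, &expected, expected * 2)) {
    }
}
#endif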

#if MBED_EXCLUSIVE_ACCESS
/* These are the C99 external definitions for the inline functions */
/* We maintain external definitions rather than using "static inline" for backwards binary compatibility
 * and to give the compiler plenty of leeway to choose to not inline in both C and C++ modes
 */
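
/* For illustration, the assumed shape of the pairing with the header:
 *
 *     // mbed_atomic.h:
 *     inline uint8_t core_util_atomic_exchange_u8(volatile uint8_t *valuePtr, uint8_t newValue) { ... }
 *
 *     // this file:
 *     extern inline uint8_t core_util_atomic_exchange_u8(volatile uint8_t *valuePtr, uint8_t newValue);
 *
 * In C99, a function defined only as "inline" has no external definition;
 * the "extern inline" redeclaration here makes this translation unit emit
 * the one out-of-line copy that any non-inlined calls link against.
 */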

extern inline bool core_util_atomic_flag_test_and_set(volatile core_util_atomic_flag *flagPtr);

extern inline uint8_t core_util_atomic_exchange_u8(volatile uint8_t *valuePtr, uint8_t newValue);
extern inline uint16_t core_util_atomic_exchange_u16(volatile uint16_t *valuePtr, uint16_t newValue);
extern inline uint32_t core_util_atomic_exchange_u32(volatile uint32_t *valuePtr, uint32_t newValue);
extern inline uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t arg);
extern inline uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t arg);
extern inline uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t arg);
extern inline uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t arg);
extern inline uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t arg);
extern inline uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t arg);
extern inline uint8_t core_util_atomic_fetch_add_u8(volatile uint8_t *valuePtr, uint8_t arg);
extern inline uint16_t core_util_atomic_fetch_add_u16(volatile uint16_t *valuePtr, uint16_t arg);
extern inline uint32_t core_util_atomic_fetch_add_u32(volatile uint32_t *valuePtr, uint32_t arg);
extern inline uint8_t core_util_atomic_fetch_sub_u8(volatile uint8_t *valuePtr, uint8_t arg);
extern inline uint16_t core_util_atomic_fetch_sub_u16(volatile uint16_t *valuePtr, uint16_t arg);
extern inline uint32_t core_util_atomic_fetch_sub_u32(volatile uint32_t *valuePtr, uint32_t arg);
extern inline uint8_t core_util_atomic_fetch_and_u8(volatile uint8_t *valuePtr, uint8_t arg);
extern inline uint16_t core_util_atomic_fetch_and_u16(volatile uint16_t *valuePtr, uint16_t arg);
extern inline uint32_t core_util_atomic_fetch_and_u32(volatile uint32_t *valuePtr, uint32_t arg);
extern inline uint8_t core_util_atomic_fetch_or_u8(volatile uint8_t *valuePtr, uint8_t arg);
extern inline uint16_t core_util_atomic_fetch_or_u16(volatile uint16_t *valuePtr, uint16_t arg);
extern inline uint32_t core_util_atomic_fetch_or_u32(volatile uint32_t *valuePtr, uint32_t arg);
extern inline uint8_t core_util_atomic_fetch_xor_u8(volatile uint8_t *valuePtr, uint8_t arg);
extern inline uint16_t core_util_atomic_fetch_xor_u16(volatile uint16_t *valuePtr, uint16_t arg);
extern inline uint32_t core_util_atomic_fetch_xor_u32(volatile uint32_t *valuePtr, uint32_t arg);
extern inline bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue);
extern inline bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue);
extern inline bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue);
extern inline bool core_util_atomic_compare_exchange_weak_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue);
extern inline bool core_util_atomic_compare_exchange_weak_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue);
extern inline bool core_util_atomic_compare_exchange_weak_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue);

#else

bool core_util_atomic_flag_test_and_set(volatile core_util_atomic_flag *flagPtr)
{
    core_util_critical_section_enter();
    uint8_t currentValue = flagPtr->_flag;
    flagPtr->_flag = true;
    core_util_critical_section_exit();
    return currentValue;
}
#endif
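
/* Usage sketch (hypothetical helpers, for illustration): whichever
 * implementation is selected above, the flag supports a minimal spinlock:
 */
#if 0
static core_util_atomic_flag example_lock = CORE_UTIL_ATOMIC_FLAG_INIT;

static void example_acquire(void)
{
    /* Spin until we are the caller that flips the flag from clear to set */
    while (core_util_atomic_flag_test_and_set(&example_lock)) {
    }
}

static void example_release(void)
{
    core_util_atomic_flag_clear(&example_lock);
}
#endif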

/* No architecture we support has LDREXD/STREXD, so must always disable IRQs for 64-bit operations */
uint64_t core_util_atomic_load_u64(const volatile uint64_t *valuePtr)
{
    core_util_critical_section_enter();
    uint64_t currentValue = *valuePtr;
    core_util_critical_section_exit();
    return currentValue;
}

void core_util_atomic_store_u64(volatile uint64_t *valuePtr, uint64_t desiredValue)
{
    core_util_critical_section_enter();
    *valuePtr = desiredValue;
    core_util_critical_section_exit();
}
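
/* For illustration: on a 32-bit core a 64-bit access is two 32-bit
 * transfers, so a reader preempted between the halves could see a torn
 * value (e.g. a new low word paired with an old high word). The critical
 * sections above rule that out, which is what makes a pattern like this
 * hypothetical millisecond counter safe:
 */
#if 0
static volatile uint64_t example_ms_ticks;

static void example_tick_isr(void)       /* interrupt context */
{
    core_util_atomic_incr_u64(&example_ms_ticks, 1);
}

static uint64_t example_read_ms(void)    /* thread context */
{
    return core_util_atomic_load_u64(&example_ms_ticks);
}
#endif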

/* Now locked operations for whichever we don't have lock-free ones for */
#if MBED_EXCLUSIVE_ACCESS
/* Just need 64-bit locked operations */
#define DO_MBED_LOCKED_OPS(name, OP, retValue) \
    DO_MBED_LOCKED_OP(name, OP, retValue, uint64_t, u64)
#define DO_MBED_LOCKED_CAS_OPS() \
    DO_MBED_LOCKED_CAS_OP(uint64_t, u64)
#else
/* All the operations are locked */
#define DO_MBED_LOCKED_OPS(name, OP, retValue) \
    DO_MBED_LOCKED_OP(name, OP, retValue, uint8_t,  u8) \
    DO_MBED_LOCKED_OP(name, OP, retValue, uint16_t, u16) \
    DO_MBED_LOCKED_OP(name, OP, retValue, uint32_t, u32) \
    DO_MBED_LOCKED_OP(name, OP, retValue, uint64_t, u64)
#define DO_MBED_LOCKED_CAS_OPS() \
    DO_MBED_LOCKED_CAS_OP(uint8_t,  u8) \
    DO_MBED_LOCKED_CAS_OP(uint16_t, u16) \
    DO_MBED_LOCKED_CAS_OP(uint32_t, u32) \
    DO_MBED_LOCKED_CAS_OP(uint64_t, u64)
#endif

// *INDENT-OFF*
DO_MBED_LOCKED_OPS(exchange,  arg,            oldValue)
DO_MBED_LOCKED_OPS(incr,      oldValue + arg, newValue)
DO_MBED_LOCKED_OPS(decr,      oldValue - arg, newValue)
DO_MBED_LOCKED_OPS(fetch_add, oldValue + arg, oldValue)
DO_MBED_LOCKED_OPS(fetch_sub, oldValue - arg, oldValue)
DO_MBED_LOCKED_OPS(fetch_and, oldValue & arg, oldValue)
DO_MBED_LOCKED_OPS(fetch_or,  oldValue | arg, oldValue)
DO_MBED_LOCKED_OPS(fetch_xor, oldValue ^ arg, oldValue)
DO_MBED_LOCKED_CAS_OPS()
// *INDENT-ON*
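
/* For illustration: in the table above, the second argument is the
 * expression stored back and the third is the value returned. So "incr"
 * returns the new value (like C's +=), while "fetch_add" stores the same
 * sum but returns the value before the addition (like C11 atomic_fetch_add):
 *
 *     volatile uint32_t v = 5;
 *     core_util_atomic_incr_u32(&v, 2);       // v == 7, returns 7
 *     core_util_atomic_fetch_add_u32(&v, 2);  // v == 9, returns 7
 */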

/* Similar functions for s32 etc are static inline, but these are extern inline for legacy binary compatibility */
extern inline void *core_util_atomic_exchange_ptr(void *volatile *valuePtr, void *desiredValue);
extern inline void *core_util_atomic_incr_ptr(void *volatile *valuePtr, ptrdiff_t delta);
extern inline void *core_util_atomic_decr_ptr(void *volatile *valuePtr, ptrdiff_t delta);
extern inline bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue);
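
/* Usage sketch (hypothetical cursor, for illustration): the pointer variants
 * mirror their u32 counterparts, with incr/decr deltas measured in bytes:
 */
#if 0
static uint8_t example_buffer[64];
static uint8_t *volatile example_write_ptr = example_buffer;

static uint8_t *example_advance(void)
{
    /* Atomically advance the cursor by 4 bytes, returning the new position */
    return core_util_atomic_incr_ptr((void *volatile *)&example_write_ptr, 4);
}
#endif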