mbed-os5 only for TYBLE16

Dependents:   TYBLE16_simple_data_logger TYBLE16_MP3_Air

Committer:
kenjiArai
Date:
Tue Dec 31 06:02:27 2019 +0000
Revision:
1:9db0e321a9f4
Parent:
platform/internal/mbed_atomic_impl.c@0:5b88d5760320
updated based on mbed-os5.15.0

Who changed what in which revision?

User | Revision | Line number | New contents of line
kenjiArai 0:5b88d5760320 1 /*
kenjiArai 0:5b88d5760320 2 * Copyright (c) 2019, ARM Limited, All Rights Reserved
kenjiArai 0:5b88d5760320 3 * SPDX-License-Identifier: Apache-2.0
kenjiArai 0:5b88d5760320 4 *
kenjiArai 0:5b88d5760320 5 * Licensed under the Apache License, Version 2.0 (the "License"); you may
kenjiArai 0:5b88d5760320 6 * not use this file except in compliance with the License.
kenjiArai 0:5b88d5760320 7 * You may obtain a copy of the License at
kenjiArai 0:5b88d5760320 8 *
kenjiArai 0:5b88d5760320 9 * http://www.apache.org/licenses/LICENSE-2.0
kenjiArai 0:5b88d5760320 10 *
kenjiArai 0:5b88d5760320 11 * Unless required by applicable law or agreed to in writing, software
kenjiArai 0:5b88d5760320 12 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
kenjiArai 0:5b88d5760320 13 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
kenjiArai 0:5b88d5760320 14 * See the License for the specific language governing permissions and
kenjiArai 0:5b88d5760320 15 * limitations under the License.
kenjiArai 0:5b88d5760320 16 */
kenjiArai 0:5b88d5760320 17
kenjiArai 0:5b88d5760320 18 #include "platform/mbed_assert.h"
kenjiArai 0:5b88d5760320 19 #include "platform/mbed_atomic.h"
kenjiArai 0:5b88d5760320 20 #include "platform/mbed_critical.h"
kenjiArai 0:5b88d5760320 21
/* Compile-time layout checks: the inline implementations in mbed_atomic.h
 * reinterpret bool as uint8_t and pointers as uint32_t, so this file is only
 * valid on targets where those sizes match (i.e. 32-bit Cortex-M parts).
 */

/* Inline bool implementations in the header use uint8_t versions to manipulate the bool */
MBED_STATIC_ASSERT(sizeof(bool) == sizeof(uint8_t), "Surely bool is a byte");

/* Inline implementations in the header use uint32_t versions to manipulate pointers */
MBED_STATIC_ASSERT(sizeof(void *) == sizeof(uint32_t), "Alas, pointers must be 32-bit");
kenjiArai 0:5b88d5760320 27
kenjiArai 0:5b88d5760320 28
/* Emit one locked read-modify-write function named core_util_atomic_<name>_<fn_suffix>.
 *
 * T          - operand type (uint8_t .. uint64_t)
 * OP         - expression computing the new value; may reference the locals
 *              `oldValue` (value read from *valuePtr) and `arg` (caller argument)
 * retValue   - which local to return: `oldValue` (fetch_* semantics) or
 *              `newValue` (incr/decr semantics)
 *
 * Atomicity is provided by disabling interrupts around the read-modify-write,
 * which is the fallback when no lock-free (LDREX/STREX) form exists.
 * NOTE: the names oldValue/newValue/arg are part of this macro's contract with
 * its DO_MBED_LOCKED_OPS call sites below - do not rename them.
 */
#define DO_MBED_LOCKED_OP(name, OP, retValue, T, fn_suffix)             \
T core_util_atomic_##name##_##fn_suffix(volatile T *valuePtr, T arg)    \
{                                                                       \
    T oldValue, newValue;                                               \
    core_util_critical_section_enter();                                 \
    oldValue = *valuePtr;                                               \
    newValue = OP;                                                      \
    *valuePtr = newValue;                                               \
    core_util_critical_section_exit();                                  \
    return retValue;                                                    \
}
kenjiArai 0:5b88d5760320 40
/* Emit the locked compare-and-swap pair for type T:
 *   core_util_atomic_cas_<fn_suffix>                    (strong CAS)
 *   core_util_atomic_compare_exchange_weak_<fn_suffix>  (weak CAS)
 *
 * Semantics follow C11 atomic_compare_exchange: if *ptr equals
 * *expectedCurrentValue, *ptr is set to desiredValue and true is returned;
 * otherwise the observed value is written back to *expectedCurrentValue and
 * false is returned.
 *
 * Because the implementation holds a critical section (IRQs disabled) the
 * operation can never fail spuriously, so the weak form simply delegates to
 * the strong form.
 */
#define DO_MBED_LOCKED_CAS_OP(T, fn_suffix)                                                     \
bool core_util_atomic_cas_##fn_suffix(volatile T *ptr, T *expectedCurrentValue, T desiredValue) \
{                                                                                               \
    bool success;                                                                               \
    T currentValue;                                                                             \
    core_util_critical_section_enter();                                                         \
    currentValue = *ptr;                                                                        \
    if (currentValue == *expectedCurrentValue) {                                                \
        *ptr = desiredValue;                                                                    \
        success = true;                                                                         \
    } else {                                                                                    \
        *expectedCurrentValue = currentValue;                                                   \
        success = false;                                                                        \
    }                                                                                           \
    core_util_critical_section_exit();                                                          \
    return success;                                                                             \
}                                                                                               \
                                                                                                \
bool core_util_atomic_compare_exchange_weak_##fn_suffix(volatile T *ptr,                        \
        T *expectedCurrentValue, T desiredValue)                                                \
{                                                                                               \
    return core_util_atomic_cas_##fn_suffix(ptr, expectedCurrentValue, desiredValue);           \
}
kenjiArai 0:5b88d5760320 64
#if MBED_EXCLUSIVE_ACCESS
/* These are the C99 external definitions for the inline functions */
/* We maintain external definitions rather than using "static inline" for backwards binary compatibility
 * and to give the compiler plenty of leeway to choose to not inline in both C and C++ modes
 */
/* When the target has exclusive-access instructions (LDREX/STREX), the 8/16/32-bit
 * operations are implemented inline in mbed_atomic.h; the `extern inline`
 * declarations below force exactly one out-of-line definition of each into this
 * translation unit (C99 inline model) so old binaries linking by symbol still work.
 */

extern inline bool core_util_atomic_flag_test_and_set(volatile core_util_atomic_flag *flagPtr);

extern inline uint8_t core_util_atomic_exchange_u8(volatile uint8_t *valuePtr, uint8_t newValue);
extern inline uint16_t core_util_atomic_exchange_u16(volatile uint16_t *valuePtr, uint16_t newValue);
extern inline uint32_t core_util_atomic_exchange_u32(volatile uint32_t *valuePtr, uint32_t newValue);
extern inline uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t arg);
extern inline uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t arg);
extern inline uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t arg);
extern inline uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t arg);
extern inline uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t arg);
extern inline uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t arg);
extern inline uint8_t core_util_atomic_fetch_add_u8(volatile uint8_t *valuePtr, uint8_t arg);
extern inline uint16_t core_util_atomic_fetch_add_u16(volatile uint16_t *valuePtr, uint16_t arg);
extern inline uint32_t core_util_atomic_fetch_add_u32(volatile uint32_t *valuePtr, uint32_t arg);
extern inline uint8_t core_util_atomic_fetch_sub_u8(volatile uint8_t *valuePtr, uint8_t arg);
extern inline uint16_t core_util_atomic_fetch_sub_u16(volatile uint16_t *valuePtr, uint16_t arg);
extern inline uint32_t core_util_atomic_fetch_sub_u32(volatile uint32_t *valuePtr, uint32_t arg);
extern inline uint8_t core_util_atomic_fetch_and_u8(volatile uint8_t *valuePtr, uint8_t arg);
extern inline uint16_t core_util_atomic_fetch_and_u16(volatile uint16_t *valuePtr, uint16_t arg);
extern inline uint32_t core_util_atomic_fetch_and_u32(volatile uint32_t *valuePtr, uint32_t arg);
extern inline uint8_t core_util_atomic_fetch_or_u8(volatile uint8_t *valuePtr, uint8_t arg);
extern inline uint16_t core_util_atomic_fetch_or_u16(volatile uint16_t *valuePtr, uint16_t arg);
extern inline uint32_t core_util_atomic_fetch_or_u32(volatile uint32_t *valuePtr, uint32_t arg);
extern inline uint8_t core_util_atomic_fetch_xor_u8(volatile uint8_t *valuePtr, uint8_t arg);
extern inline uint16_t core_util_atomic_fetch_xor_u16(volatile uint16_t *valuePtr, uint16_t arg);
extern inline uint32_t core_util_atomic_fetch_xor_u32(volatile uint32_t *valuePtr, uint32_t arg);
extern inline bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue);
extern inline bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue);
extern inline bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue);
extern inline bool core_util_atomic_compare_exchange_weak_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue);
extern inline bool core_util_atomic_compare_exchange_weak_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue);
extern inline bool core_util_atomic_compare_exchange_weak_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue);

#else

/* No exclusive-access support: implement atomic flag test-and-set by
 * disabling interrupts around the read-then-set sequence.
 * Returns the previous flag value (non-zero if the flag was already set),
 * matching C11 atomic_flag_test_and_set semantics.
 */
bool core_util_atomic_flag_test_and_set(volatile core_util_atomic_flag *flagPtr)
{
    core_util_critical_section_enter();
    uint8_t currentValue = flagPtr->_flag;
    flagPtr->_flag = true;
    core_util_critical_section_exit();
    return currentValue;
}
#endif
kenjiArai 0:5b88d5760320 115
/* No architecture we support has LDREXD/STREXD, so must always disable IRQs for 64-bit operations */

/* Atomically read a 64-bit value by performing the (non-single-copy-atomic)
 * load inside a critical section, so no interrupt can observe or cause a torn
 * access while the two words are read.
 */
uint64_t core_util_atomic_load_u64(const volatile uint64_t *valuePtr)
{
    uint64_t value;

    core_util_critical_section_enter();
    value = *valuePtr;
    core_util_critical_section_exit();

    return value;
}
kenjiArai 0:5b88d5760320 124
/* Atomically write a 64-bit value: the store is not single-copy atomic on
 * 32-bit targets, so it is wrapped in a critical section to prevent a torn
 * write being observed from interrupt context.
 */
void core_util_atomic_store_u64(volatile uint64_t *valuePtr, uint64_t desiredValue)
{
    core_util_critical_section_enter();
    *valuePtr = desiredValue;
    core_util_critical_section_exit();
}
kenjiArai 0:5b88d5760320 131
/* Now locked operations for whichever we don't have lock-free ones for */
/* Select which type widths get the critical-section-based implementations:
 * with exclusive access, 8/16/32-bit forms are lock-free inline in the header
 * and only the 64-bit forms need locking; without it, every width is locked.
 */
#if MBED_EXCLUSIVE_ACCESS
/* Just need 64-bit locked operations */
#define DO_MBED_LOCKED_OPS(name, OP, retValue) \
    DO_MBED_LOCKED_OP(name, OP, retValue, uint64_t, u64)
#define DO_MBED_LOCKED_CAS_OPS() \
    DO_MBED_LOCKED_CAS_OP(uint64_t, u64)
#else
/* All the operations are locked */
#define DO_MBED_LOCKED_OPS(name, OP, retValue) \
    DO_MBED_LOCKED_OP(name, OP, retValue, uint8_t, u8)   \
    DO_MBED_LOCKED_OP(name, OP, retValue, uint16_t, u16) \
    DO_MBED_LOCKED_OP(name, OP, retValue, uint32_t, u32) \
    DO_MBED_LOCKED_OP(name, OP, retValue, uint64_t, u64)
#define DO_MBED_LOCKED_CAS_OPS() \
    DO_MBED_LOCKED_CAS_OP(uint8_t, u8)   \
    DO_MBED_LOCKED_CAS_OP(uint16_t, u16) \
    DO_MBED_LOCKED_CAS_OP(uint32_t, u32) \
    DO_MBED_LOCKED_CAS_OP(uint64_t, u64)
#endif
kenjiArai 0:5b88d5760320 152
// *INDENT-OFF*
/* Instantiate the locked functions. Second argument is the OP expression
 * (in terms of oldValue/arg); third argument selects the return value:
 *   - exchange returns the previous value (oldValue)
 *   - incr/decr return the updated value (newValue)
 *   - fetch_* return the previous value (oldValue), C11-style
 */
DO_MBED_LOCKED_OPS(exchange, arg, oldValue)
DO_MBED_LOCKED_OPS(incr, oldValue + arg, newValue)
DO_MBED_LOCKED_OPS(decr, oldValue - arg, newValue)
DO_MBED_LOCKED_OPS(fetch_add, oldValue + arg, oldValue)
DO_MBED_LOCKED_OPS(fetch_sub, oldValue - arg, oldValue)
DO_MBED_LOCKED_OPS(fetch_and, oldValue & arg, oldValue)
DO_MBED_LOCKED_OPS(fetch_or, oldValue | arg, oldValue)
DO_MBED_LOCKED_OPS(fetch_xor, oldValue ^ arg, oldValue)
DO_MBED_LOCKED_CAS_OPS()
// *INDENT-ON*
kenjiArai 0:5b88d5760320 164
/* Similar functions for s32 etc are static inline, but these are extern inline for legacy binary compatibility */
/* Force out-of-line definitions of the pointer-variant inlines from
 * mbed_atomic.h into this translation unit (C99 inline model), so older
 * binaries that link these symbols by name keep working.
 */
extern inline void *core_util_atomic_exchange_ptr(void *volatile *valuePtr, void *desiredValue);
extern inline void *core_util_atomic_incr_ptr(void *volatile *valuePtr, ptrdiff_t delta);
extern inline void *core_util_atomic_decr_ptr(void *volatile *valuePtr, ptrdiff_t delta);
extern inline bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue);