RTC to true

Committer: kevman
Date: Wed Mar 13 11:03:24 2019 +0000
Revision: 2:7aab896b1a3b
Parent: 0:38ceb79fef03

Who changed what in which revision?

User | Revision | Line number | New contents of line
kevman 0:38ceb79fef03 1 /*
kevman 0:38ceb79fef03 2 * Copyright (c) 2015-2016, ARM Limited, All Rights Reserved
kevman 0:38ceb79fef03 3 * SPDX-License-Identifier: Apache-2.0
kevman 0:38ceb79fef03 4 *
kevman 0:38ceb79fef03 5 * Licensed under the Apache License, Version 2.0 (the "License"); you may
kevman 0:38ceb79fef03 6 * not use this file except in compliance with the License.
kevman 0:38ceb79fef03 7 * You may obtain a copy of the License at
kevman 0:38ceb79fef03 8 *
kevman 0:38ceb79fef03 9 * http://www.apache.org/licenses/LICENSE-2.0
kevman 0:38ceb79fef03 10 *
kevman 0:38ceb79fef03 11 * Unless required by applicable law or agreed to in writing, software
kevman 0:38ceb79fef03 12 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
kevman 0:38ceb79fef03 13 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
kevman 0:38ceb79fef03 14 * See the License for the specific language governing permissions and
kevman 0:38ceb79fef03 15 * limitations under the License.
kevman 0:38ceb79fef03 16 */
kevman 0:38ceb79fef03 17
kevman 0:38ceb79fef03 18 /* Declare __STDC_LIMIT_MACROS so stdint.h defines UINT32_MAX when using C++ */
kevman 0:38ceb79fef03 19 #define __STDC_LIMIT_MACROS
kevman 0:38ceb79fef03 20 #include "hal/critical_section_api.h"
kevman 0:38ceb79fef03 21
kevman 0:38ceb79fef03 22 #include "cmsis.h"
kevman 0:38ceb79fef03 23 #include "platform/mbed_assert.h"
kevman 0:38ceb79fef03 24 #include "platform/mbed_critical.h"
kevman 0:38ceb79fef03 25 #include "platform/mbed_toolchain.h"
kevman 0:38ceb79fef03 26
kevman 0:38ceb79fef03 27 // If the RTX __EXCLUSIVE_ACCESS macro is not defined, derive it from the architecture macros we set ourselves
kevman 0:38ceb79fef03 28 #ifndef MBED_EXCLUSIVE_ACCESS
kevman 0:38ceb79fef03 29 #ifndef __EXCLUSIVE_ACCESS
kevman 0:38ceb79fef03 30 #if ((__ARM_ARCH_7M__ == 1U) || \
kevman 0:38ceb79fef03 31 (__ARM_ARCH_7EM__ == 1U) || \
kevman 0:38ceb79fef03 32 (__ARM_ARCH_8M_BASE__ == 1U) || \
kevman 0:38ceb79fef03 33 (__ARM_ARCH_8M_MAIN__ == 1U) || \
kevman 0:38ceb79fef03 34 (__ARM_ARCH_7A__ == 1U))
kevman 0:38ceb79fef03 35 #define MBED_EXCLUSIVE_ACCESS 1U
kevman 0:38ceb79fef03 36 #elif (__ARM_ARCH_6M__ == 1U)
kevman 0:38ceb79fef03 37 #define MBED_EXCLUSIVE_ACCESS 0U
kevman 0:38ceb79fef03 38 #else
kevman 0:38ceb79fef03 39 #error "Unknown architecture for exclusive access"
kevman 0:38ceb79fef03 40 #endif
kevman 0:38ceb79fef03 41 #else
kevman 0:38ceb79fef03 42 #define MBED_EXCLUSIVE_ACCESS __EXCLUSIVE_ACCESS
kevman 0:38ceb79fef03 43 #endif
kevman 0:38ceb79fef03 44 #endif
kevman 0:38ceb79fef03 45
kevman 0:38ceb79fef03 46 static volatile uint32_t critical_section_reentrancy_counter = 0;
kevman 0:38ceb79fef03 47
kevman 0:38ceb79fef03 48 bool core_util_are_interrupts_enabled(void)
kevman 0:38ceb79fef03 49 {
kevman 0:38ceb79fef03 50 #if defined(__CORTEX_A9)
kevman 0:38ceb79fef03 51 return ((__get_CPSR() & 0x80) == 0);
kevman 0:38ceb79fef03 52 #else
kevman 0:38ceb79fef03 53 return ((__get_PRIMASK() & 0x1) == 0);
kevman 0:38ceb79fef03 54 #endif
kevman 0:38ceb79fef03 55 }
kevman 0:38ceb79fef03 56
kevman 0:38ceb79fef03 57 bool core_util_is_isr_active(void)
kevman 0:38ceb79fef03 58 {
kevman 0:38ceb79fef03 59 #if defined(__CORTEX_A9)
kevman 0:38ceb79fef03 60 switch (__get_CPSR() & 0x1FU) {
kevman 0:38ceb79fef03 61 case CPSR_M_USR:
kevman 0:38ceb79fef03 62 case CPSR_M_SYS:
kevman 0:38ceb79fef03 63 return false;
kevman 0:38ceb79fef03 64 case CPSR_M_SVC:
kevman 0:38ceb79fef03 65 default:
kevman 0:38ceb79fef03 66 return true;
kevman 0:38ceb79fef03 67 }
kevman 0:38ceb79fef03 68 #else
kevman 0:38ceb79fef03 69 return (__get_IPSR() != 0U);
kevman 0:38ceb79fef03 70 #endif
kevman 0:38ceb79fef03 71 }
kevman 0:38ceb79fef03 72
kevman 0:38ceb79fef03 73 bool core_util_in_critical_section(void)
kevman 0:38ceb79fef03 74 {
kevman 0:38ceb79fef03 75 return hal_in_critical_section();
kevman 0:38ceb79fef03 76 }
kevman 0:38ceb79fef03 77
kevman 0:38ceb79fef03 78 void core_util_critical_section_enter(void)
kevman 0:38ceb79fef03 79 {
kevman 0:38ceb79fef03 80 // If the reentrancy counter overflows something has gone badly wrong.
kevman 0:38ceb79fef03 81 MBED_ASSERT(critical_section_reentrancy_counter < UINT32_MAX);
kevman 0:38ceb79fef03 82
kevman 0:38ceb79fef03 83 hal_critical_section_enter();
kevman 0:38ceb79fef03 84
kevman 0:38ceb79fef03 85 ++critical_section_reentrancy_counter;
kevman 0:38ceb79fef03 86 }
kevman 0:38ceb79fef03 87
kevman 0:38ceb79fef03 88 void core_util_critical_section_exit(void)
kevman 0:38ceb79fef03 89 {
kevman 0:38ceb79fef03 90
kevman 0:38ceb79fef03 91 // If critical_section_enter has not previously been called, do nothing
kevman 0:38ceb79fef03 92 if (critical_section_reentrancy_counter == 0) {
kevman 0:38ceb79fef03 93 return;
kevman 0:38ceb79fef03 94 }
kevman 0:38ceb79fef03 95
kevman 0:38ceb79fef03 96 --critical_section_reentrancy_counter;
kevman 0:38ceb79fef03 97
kevman 0:38ceb79fef03 98 if (critical_section_reentrancy_counter == 0) {
kevman 0:38ceb79fef03 99 hal_critical_section_exit();
kevman 0:38ceb79fef03 100 }
kevman 0:38ceb79fef03 101 }
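
The reentrancy counter above makes enter/exit calls safe to nest: the underlying HAL critical section is only released when the outermost exit brings the counter back to zero. A minimal usage sketch (hypothetical caller code, not part of this file), assuming a shared flag touched from both thread and interrupt context:

void set_shared_flag(volatile uint32_t *flag, uint32_t value)
{
    core_util_critical_section_enter();   /* outermost call disables interrupts via the HAL */
    *flag = value;                        /* protected region; nested enter/exit pairs are allowed here */
    core_util_critical_section_exit();    /* interrupts are restored only when the counter returns to 0 */
}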
kevman 0:38ceb79fef03 102
kevman 0:38ceb79fef03 103 #if MBED_EXCLUSIVE_ACCESS
kevman 0:38ceb79fef03 104
kevman 0:38ceb79fef03 105 /* Suppress __ldrex and __strex deprecated warnings - "#3731-D: intrinsic is deprecated" */
kevman 0:38ceb79fef03 106 #if defined (__CC_ARM)
kevman 0:38ceb79fef03 107 #pragma diag_suppress 3731
kevman 0:38ceb79fef03 108 #endif
kevman 0:38ceb79fef03 109
kevman 0:38ceb79fef03 110 bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
kevman 0:38ceb79fef03 111 {
kevman 0:38ceb79fef03 112 do {
kevman 0:38ceb79fef03 113 uint8_t currentValue = __LDREXB(ptr);
kevman 0:38ceb79fef03 114 if (currentValue != *expectedCurrentValue) {
kevman 0:38ceb79fef03 115 *expectedCurrentValue = currentValue;
kevman 0:38ceb79fef03 116 __CLREX();
kevman 0:38ceb79fef03 117 return false;
kevman 0:38ceb79fef03 118 }
kevman 0:38ceb79fef03 119 } while (__STREXB(desiredValue, ptr));
kevman 0:38ceb79fef03 120 return true;
kevman 0:38ceb79fef03 121 }
kevman 0:38ceb79fef03 122
kevman 0:38ceb79fef03 123 bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
kevman 0:38ceb79fef03 124 {
kevman 0:38ceb79fef03 125 do {
kevman 0:38ceb79fef03 126 uint16_t currentValue = __LDREXH(ptr);
kevman 0:38ceb79fef03 127 if (currentValue != *expectedCurrentValue) {
kevman 0:38ceb79fef03 128 *expectedCurrentValue = currentValue;
kevman 0:38ceb79fef03 129 __CLREX();
kevman 0:38ceb79fef03 130 return false;
kevman 0:38ceb79fef03 131 }
kevman 0:38ceb79fef03 132 } while (__STREXH(desiredValue, ptr));
kevman 0:38ceb79fef03 133 return true;
kevman 0:38ceb79fef03 134 }
kevman 0:38ceb79fef03 135
kevman 0:38ceb79fef03 136
kevman 0:38ceb79fef03 137 bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
kevman 0:38ceb79fef03 138 {
kevman 0:38ceb79fef03 139 do {
kevman 0:38ceb79fef03 140 uint32_t currentValue = __LDREXW(ptr);
kevman 0:38ceb79fef03 141 if (currentValue != *expectedCurrentValue) {
kevman 0:38ceb79fef03 142 *expectedCurrentValue = currentValue;
kevman 0:38ceb79fef03 143 __CLREX();
kevman 0:38ceb79fef03 144 return false;
kevman 0:38ceb79fef03 145 }
kevman 0:38ceb79fef03 146 } while (__STREXW(desiredValue, ptr));
kevman 0:38ceb79fef03 147 return true;
kevman 0:38ceb79fef03 148 }
kevman 0:38ceb79fef03 149
kevman 0:38ceb79fef03 150 uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta)
kevman 0:38ceb79fef03 151 {
kevman 0:38ceb79fef03 152 uint8_t newValue;
kevman 0:38ceb79fef03 153 do {
kevman 0:38ceb79fef03 154 newValue = __LDREXB(valuePtr) + delta;
kevman 0:38ceb79fef03 155 } while (__STREXB(newValue, valuePtr));
kevman 0:38ceb79fef03 156 return newValue;
kevman 0:38ceb79fef03 157 }
kevman 0:38ceb79fef03 158
kevman 0:38ceb79fef03 159 uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta)
kevman 0:38ceb79fef03 160 {
kevman 0:38ceb79fef03 161 uint16_t newValue;
kevman 0:38ceb79fef03 162 do {
kevman 0:38ceb79fef03 163 newValue = __LDREXH(valuePtr) + delta;
kevman 0:38ceb79fef03 164 } while (__STREXH(newValue, valuePtr));
kevman 0:38ceb79fef03 165 return newValue;
kevman 0:38ceb79fef03 166 }
kevman 0:38ceb79fef03 167
kevman 0:38ceb79fef03 168 uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta)
kevman 0:38ceb79fef03 169 {
kevman 0:38ceb79fef03 170 uint32_t newValue;
kevman 0:38ceb79fef03 171 do {
kevman 0:38ceb79fef03 172 newValue = __LDREXW(valuePtr) + delta;
kevman 0:38ceb79fef03 173 } while (__STREXW(newValue, valuePtr));
kevman 0:38ceb79fef03 174 return newValue;
kevman 0:38ceb79fef03 175 }
kevman 0:38ceb79fef03 176
kevman 0:38ceb79fef03 177
kevman 0:38ceb79fef03 178 uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta)
kevman 0:38ceb79fef03 179 {
kevman 0:38ceb79fef03 180 uint8_t newValue;
kevman 0:38ceb79fef03 181 do {
kevman 0:38ceb79fef03 182 newValue = __LDREXB(valuePtr) - delta;
kevman 0:38ceb79fef03 183 } while (__STREXB(newValue, valuePtr));
kevman 0:38ceb79fef03 184 return newValue;
kevman 0:38ceb79fef03 185 }
kevman 0:38ceb79fef03 186
kevman 0:38ceb79fef03 187 uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta)
kevman 0:38ceb79fef03 188 {
kevman 0:38ceb79fef03 189 uint16_t newValue;
kevman 0:38ceb79fef03 190 do {
kevman 0:38ceb79fef03 191 newValue = __LDREXH(valuePtr) - delta;
kevman 0:38ceb79fef03 192 } while (__STREXH(newValue, valuePtr));
kevman 0:38ceb79fef03 193 return newValue;
kevman 0:38ceb79fef03 194 }
kevman 0:38ceb79fef03 195
kevman 0:38ceb79fef03 196 uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
kevman 0:38ceb79fef03 197 {
kevman 0:38ceb79fef03 198 uint32_t newValue;
kevman 0:38ceb79fef03 199 do {
kevman 0:38ceb79fef03 200 newValue = __LDREXW(valuePtr) - delta;
kevman 0:38ceb79fef03 201 } while (__STREXW(newValue, valuePtr));
kevman 0:38ceb79fef03 202 return newValue;
kevman 0:38ceb79fef03 203 }
kevman 0:38ceb79fef03 204
kevman 0:38ceb79fef03 205 #else
kevman 0:38ceb79fef03 206
kevman 0:38ceb79fef03 207 bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
kevman 0:38ceb79fef03 208 {
kevman 0:38ceb79fef03 209 bool success;
kevman 0:38ceb79fef03 210 uint8_t currentValue;
kevman 0:38ceb79fef03 211 core_util_critical_section_enter();
kevman 0:38ceb79fef03 212 currentValue = *ptr;
kevman 0:38ceb79fef03 213 if (currentValue == *expectedCurrentValue) {
kevman 0:38ceb79fef03 214 *ptr = desiredValue;
kevman 0:38ceb79fef03 215 success = true;
kevman 0:38ceb79fef03 216 } else {
kevman 0:38ceb79fef03 217 *expectedCurrentValue = currentValue;
kevman 0:38ceb79fef03 218 success = false;
kevman 0:38ceb79fef03 219 }
kevman 0:38ceb79fef03 220 core_util_critical_section_exit();
kevman 0:38ceb79fef03 221 return success;
kevman 0:38ceb79fef03 222 }
kevman 0:38ceb79fef03 223
kevman 0:38ceb79fef03 224 bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
kevman 0:38ceb79fef03 225 {
kevman 0:38ceb79fef03 226 bool success;
kevman 0:38ceb79fef03 227 uint16_t currentValue;
kevman 0:38ceb79fef03 228 core_util_critical_section_enter();
kevman 0:38ceb79fef03 229 currentValue = *ptr;
kevman 0:38ceb79fef03 230 if (currentValue == *expectedCurrentValue) {
kevman 0:38ceb79fef03 231 *ptr = desiredValue;
kevman 0:38ceb79fef03 232 success = true;
kevman 0:38ceb79fef03 233 } else {
kevman 0:38ceb79fef03 234 *expectedCurrentValue = currentValue;
kevman 0:38ceb79fef03 235 success = false;
kevman 0:38ceb79fef03 236 }
kevman 0:38ceb79fef03 237 core_util_critical_section_exit();
kevman 0:38ceb79fef03 238 return success;
kevman 0:38ceb79fef03 239 }
kevman 0:38ceb79fef03 240
kevman 0:38ceb79fef03 241
kevman 0:38ceb79fef03 242 bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
kevman 0:38ceb79fef03 243 {
kevman 0:38ceb79fef03 244 bool success;
kevman 0:38ceb79fef03 245 uint32_t currentValue;
kevman 0:38ceb79fef03 246 core_util_critical_section_enter();
kevman 0:38ceb79fef03 247 currentValue = *ptr;
kevman 0:38ceb79fef03 248 if (currentValue == *expectedCurrentValue) {
kevman 0:38ceb79fef03 249 *ptr = desiredValue;
kevman 0:38ceb79fef03 250 success = true;
kevman 0:38ceb79fef03 251 } else {
kevman 0:38ceb79fef03 252 *expectedCurrentValue = currentValue;
kevman 0:38ceb79fef03 253 success = false;
kevman 0:38ceb79fef03 254 }
kevman 0:38ceb79fef03 255 core_util_critical_section_exit();
kevman 0:38ceb79fef03 256 return success;
kevman 0:38ceb79fef03 257 }
kevman 0:38ceb79fef03 258
kevman 0:38ceb79fef03 259
kevman 0:38ceb79fef03 260 uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta)
kevman 0:38ceb79fef03 261 {
kevman 0:38ceb79fef03 262 uint8_t newValue;
kevman 0:38ceb79fef03 263 core_util_critical_section_enter();
kevman 0:38ceb79fef03 264 newValue = *valuePtr + delta;
kevman 0:38ceb79fef03 265 *valuePtr = newValue;
kevman 0:38ceb79fef03 266 core_util_critical_section_exit();
kevman 0:38ceb79fef03 267 return newValue;
kevman 0:38ceb79fef03 268 }
kevman 0:38ceb79fef03 269
kevman 0:38ceb79fef03 270 uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta)
kevman 0:38ceb79fef03 271 {
kevman 0:38ceb79fef03 272 uint16_t newValue;
kevman 0:38ceb79fef03 273 core_util_critical_section_enter();
kevman 0:38ceb79fef03 274 newValue = *valuePtr + delta;
kevman 0:38ceb79fef03 275 *valuePtr = newValue;
kevman 0:38ceb79fef03 276 core_util_critical_section_exit();
kevman 0:38ceb79fef03 277 return newValue;
kevman 0:38ceb79fef03 278 }
kevman 0:38ceb79fef03 279
kevman 0:38ceb79fef03 280 uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta)
kevman 0:38ceb79fef03 281 {
kevman 0:38ceb79fef03 282 uint32_t newValue;
kevman 0:38ceb79fef03 283 core_util_critical_section_enter();
kevman 0:38ceb79fef03 284 newValue = *valuePtr + delta;
kevman 0:38ceb79fef03 285 *valuePtr = newValue;
kevman 0:38ceb79fef03 286 core_util_critical_section_exit();
kevman 0:38ceb79fef03 287 return newValue;
kevman 0:38ceb79fef03 288 }
kevman 0:38ceb79fef03 289
kevman 0:38ceb79fef03 290
kevman 0:38ceb79fef03 291 uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta)
kevman 0:38ceb79fef03 292 {
kevman 0:38ceb79fef03 293 uint8_t newValue;
kevman 0:38ceb79fef03 294 core_util_critical_section_enter();
kevman 0:38ceb79fef03 295 newValue = *valuePtr - delta;
kevman 0:38ceb79fef03 296 *valuePtr = newValue;
kevman 0:38ceb79fef03 297 core_util_critical_section_exit();
kevman 0:38ceb79fef03 298 return newValue;
kevman 0:38ceb79fef03 299 }
kevman 0:38ceb79fef03 300
kevman 0:38ceb79fef03 301 uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta)
kevman 0:38ceb79fef03 302 {
kevman 0:38ceb79fef03 303 uint16_t newValue;
kevman 0:38ceb79fef03 304 core_util_critical_section_enter();
kevman 0:38ceb79fef03 305 newValue = *valuePtr - delta;
kevman 0:38ceb79fef03 306 *valuePtr = newValue;
kevman 0:38ceb79fef03 307 core_util_critical_section_exit();
kevman 0:38ceb79fef03 308 return newValue;
kevman 0:38ceb79fef03 309 }
kevman 0:38ceb79fef03 310
kevman 0:38ceb79fef03 311 uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
kevman 0:38ceb79fef03 312 {
kevman 0:38ceb79fef03 313 uint32_t newValue;
kevman 0:38ceb79fef03 314 core_util_critical_section_enter();
kevman 0:38ceb79fef03 315 newValue = *valuePtr - delta;
kevman 0:38ceb79fef03 316 *valuePtr = newValue;
kevman 0:38ceb79fef03 317 core_util_critical_section_exit();
kevman 0:38ceb79fef03 318 return newValue;
kevman 0:38ceb79fef03 319 }
kevman 0:38ceb79fef03 320
kevman 0:38ceb79fef03 321 #endif
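
Both the LDREX/STREX and critical-section implementations of core_util_atomic_cas_* share the same contract: on failure the observed value is written back through expectedCurrentValue, so the caller can retry without re-reading. A hedged sketch of the usual retry loop (hypothetical caller code, assuming a shared uint32_t counter with an upper bound):

uint32_t counter_increment_capped(volatile uint32_t *counter, uint32_t cap)
{
    uint32_t expected = *counter;
    uint32_t desired;
    do {
        if (expected >= cap) {
            return expected;          /* already at the cap; leave the counter unchanged */
        }
        desired = expected + 1;
        /* On CAS failure, 'expected' is refreshed with the observed value and we try again. */
    } while (!core_util_atomic_cas_u32(counter, &expected, desired));
    return desired;
}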
kevman 0:38ceb79fef03 322
kevman 0:38ceb79fef03 323
kevman 0:38ceb79fef03 324 bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue)
kevman 0:38ceb79fef03 325 {
kevman 0:38ceb79fef03 326 return core_util_atomic_cas_u32(
kevman 0:38ceb79fef03 327 (volatile uint32_t *)ptr,
kevman 0:38ceb79fef03 328 (uint32_t *)expectedCurrentValue,
kevman 0:38ceb79fef03 329 (uint32_t)desiredValue);
kevman 0:38ceb79fef03 330 }
kevman 0:38ceb79fef03 331
kevman 0:38ceb79fef03 332 void *core_util_atomic_incr_ptr(void *volatile *valuePtr, ptrdiff_t delta)
kevman 0:38ceb79fef03 333 {
kevman 0:38ceb79fef03 334 return (void *)core_util_atomic_incr_u32((volatile uint32_t *)valuePtr, (uint32_t)delta);
kevman 0:38ceb79fef03 335 }
kevman 0:38ceb79fef03 336
kevman 0:38ceb79fef03 337 void *core_util_atomic_decr_ptr(void *volatile *valuePtr, ptrdiff_t delta)
kevman 0:38ceb79fef03 338 {
kevman 0:38ceb79fef03 339 return (void *)core_util_atomic_decr_u32((volatile uint32_t *)valuePtr, (uint32_t)delta);
kevman 0:38ceb79fef03 340 }
kevman 0:38ceb79fef03 341
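
The pointer wrappers above forward to the 32-bit helpers, which is valid on the 32-bit Cortex-M/Cortex-A targets this file covers. A hypothetical usage sketch for a lock-free bump-allocation cursor (names and pool size are illustrative, not from this file):

#include <stddef.h>
#include <stdint.h>

static uint8_t pool[256];
static uint8_t *volatile cursor = pool;

void *bump_alloc(size_t bytes)
{
    /* Atomically advance the shared cursor; the new cursor position is returned. */
    uint8_t *end = (uint8_t *)core_util_atomic_incr_ptr((void *volatile *)&cursor, (ptrdiff_t)bytes);
    return end - bytes;   /* start of the block just reserved (no pool-overflow check in this sketch) */
}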