mbed_critical.c Source File

mbed_critical.c

/*
 * Copyright (c) 2015-2016, ARM Limited, All Rights Reserved
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* Declare __STDC_LIMIT_MACROS so stdint.h defines UINT32_MAX when using C++ */
#define __STDC_LIMIT_MACROS
#include "hal/critical_section_api.h"

#include "cmsis.h"
#include "platform/mbed_assert.h"
#include "platform/mbed_critical.h"
#include "platform/mbed_toolchain.h"

// If the RTX __EXCLUSIVE_ACCESS macro is not defined, derive it from the architecture macros set by the toolchain
#ifndef MBED_EXCLUSIVE_ACCESS
#ifndef __EXCLUSIVE_ACCESS
#if ((__ARM_ARCH_7M__      == 1U) || \
    (__ARM_ARCH_7EM__     == 1U) || \
    (__ARM_ARCH_8M_BASE__ == 1U) || \
    (__ARM_ARCH_8M_MAIN__ == 1U)) || \
    (__ARM_ARCH_7A__ == 1U)
#define MBED_EXCLUSIVE_ACCESS      1U
#elif (__ARM_ARCH_6M__ == 1U)
#define MBED_EXCLUSIVE_ACCESS      0U
#else
#error "Unknown architecture for exclusive access"
#endif
#else
#define MBED_EXCLUSIVE_ACCESS __EXCLUSIVE_ACCESS
#endif
#endif

static volatile uint32_t critical_section_reentrancy_counter = 0;

bool core_util_are_interrupts_enabled(void)
{
#if defined(__CORTEX_A9)
    return ((__get_CPSR() & 0x80) == 0);
#else
    return ((__get_PRIMASK() & 0x1) == 0);
#endif
}

bool core_util_is_isr_active(void)
{
#if defined(__CORTEX_A9)
    switch(__get_CPSR() & 0x1FU) {
        case CPSR_M_USR:
        case CPSR_M_SYS:
            return false;
        case CPSR_M_SVC:
        default:
            return true;
    }
#else
    return (__get_IPSR() != 0U);
#endif
}
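
/*
 * Usage sketch (illustrative only; the helper name below is hypothetical):
 * code shared between thread and interrupt context can consult the two
 * queries above to decide whether blocking is permitted.
 */
static bool example_can_block(void)
{
    /* Blocking is only safe outside an ISR and with interrupts enabled. */
    return !core_util_is_isr_active() && core_util_are_interrupts_enabled();
}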

bool core_util_in_critical_section(void)
{
    return hal_in_critical_section();
}

void core_util_critical_section_enter(void)
{
// FIXME
#ifdef FEATURE_UVISOR
    #warning "core_util_critical_section_enter needs fixing to work from unprivileged code"
#else
    // If the reentrancy counter overflows something has gone badly wrong.
    MBED_ASSERT(critical_section_reentrancy_counter < UINT32_MAX);
#endif /* FEATURE_UVISOR */

    hal_critical_section_enter();

    ++critical_section_reentrancy_counter;
}

void core_util_critical_section_exit(void)
{
// FIXME
#ifdef FEATURE_UVISOR
    #warning "core_util_critical_section_exit needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */

    // If critical_section_enter has not previously been called, do nothing
    if (critical_section_reentrancy_counter == 0) {
        return;
    }

    --critical_section_reentrancy_counter;

    if (critical_section_reentrancy_counter == 0) {
        hal_critical_section_exit();
    }
}
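
/*
 * Usage sketch (illustrative only; the helper name below is hypothetical):
 * enter/exit calls nest via the reentrancy counter above, so a helper like
 * this is safe to call whether or not the caller already holds the critical
 * section.
 */
static uint32_t example_fetch_and_clear(volatile uint32_t *flags)
{
    uint32_t snapshot;
    core_util_critical_section_enter();
    snapshot = *flags;  /* read and reset the shared flags as one atomic step */
    *flags = 0;
    core_util_critical_section_exit();
    return snapshot;
}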

#if MBED_EXCLUSIVE_ACCESS

/* Suppress __ldrex and __strex deprecated warnings - "#3731-D: intrinsic is deprecated" */
#if defined (__CC_ARM)
#pragma diag_suppress 3731
#endif

bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    do {
        uint8_t currentValue = __LDREXB(ptr);
        if (currentValue != *expectedCurrentValue) {
            *expectedCurrentValue = currentValue;
            __CLREX();
            return false;
        }
    } while (__STREXB(desiredValue, ptr));
    return true;
}

bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    do {
        uint16_t currentValue = __LDREXH(ptr);
        if (currentValue != *expectedCurrentValue) {
            *expectedCurrentValue = currentValue;
            __CLREX();
            return false;
        }
    } while (__STREXH(desiredValue, ptr));
    return true;
}


bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    do {
        uint32_t currentValue = __LDREXW(ptr);
        if (currentValue != *expectedCurrentValue) {
            *expectedCurrentValue = currentValue;
            __CLREX();
            return false;
        }
    } while (__STREXW(desiredValue, ptr));
    return true;
}
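
/*
 * Usage sketch (illustrative only; the helper name below is hypothetical):
 * a typical compare-and-swap retry loop. On failure, core_util_atomic_cas_u32
 * writes the observed value back into expectedCurrentValue, so the loop just
 * re-evaluates and retries. The same pattern works with the critical-section
 * based fallback implementation further down.
 */
static uint32_t example_atomic_store_max(volatile uint32_t *dest, uint32_t value)
{
    uint32_t current = *dest;
    while (current < value) {
        if (core_util_atomic_cas_u32(dest, &current, value)) {
            return value;   /* this call installed the new maximum */
        }
        /* CAS failed: 'current' now holds the latest value; loop re-checks. */
    }
    return current;         /* an equal or larger value was already present */
}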

uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    do {
        newValue = __LDREXB(valuePtr) + delta;
    } while (__STREXB(newValue, valuePtr));
    return newValue;
}

uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    do {
        newValue = __LDREXH(valuePtr) + delta;
    } while (__STREXH(newValue, valuePtr));
    return newValue;
}

uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    do {
        newValue = __LDREXW(valuePtr) + delta;
    } while (__STREXW(newValue, valuePtr));
    return newValue;
}


uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    do {
        newValue = __LDREXB(valuePtr) - delta;
    } while (__STREXB(newValue, valuePtr));
    return newValue;
}

uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    do {
        newValue = __LDREXH(valuePtr) - delta;
    } while (__STREXH(newValue, valuePtr));
    return newValue;
}

uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    do {
        newValue = __LDREXW(valuePtr) - delta;
    } while (__STREXW(newValue, valuePtr));
    return newValue;
}
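
/*
 * Usage sketch (illustrative only; the helper name below is hypothetical):
 * the increment/decrement helpers return the new value, which makes simple
 * reference counting straightforward: the caller that observes zero owns the
 * cleanup.
 */
static bool example_release_reference(volatile uint32_t *refcount)
{
    return core_util_atomic_decr_u32(refcount, 1) == 0;
}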

#else

bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    bool success;
    uint8_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}

bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    bool success;
    uint16_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}


bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    bool success;
    uint32_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}


uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}


uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

#endif


bool core_util_atomic_cas_ptr(void * volatile *ptr, void **expectedCurrentValue, void *desiredValue) {
    return core_util_atomic_cas_u32(
            (volatile uint32_t *)ptr,
            (uint32_t *)expectedCurrentValue,
            (uint32_t)desiredValue);
}

void *core_util_atomic_incr_ptr(void * volatile *valuePtr, ptrdiff_t delta) {
    return (void *)core_util_atomic_incr_u32((volatile uint32_t *)valuePtr, (uint32_t)delta);
}

void *core_util_atomic_decr_ptr(void * volatile *valuePtr, ptrdiff_t delta) {
    return (void *)core_util_atomic_decr_u32((volatile uint32_t *)valuePtr, (uint32_t)delta);
}
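
/*
 * Usage sketch (illustrative only; all names below are hypothetical and the
 * factory/destructor are passed in rather than assumed to exist): race-free,
 * one-time publication of a pointer using core_util_atomic_cas_ptr. If two
 * callers race, exactly one CAS succeeds; the loser releases its candidate
 * and uses the pointer that was published.
 */
static void *example_get_or_publish(void * volatile *slot,
                                    void *(*create)(void),
                                    void (*destroy)(void *))
{
    void *current = *slot;
    if (current == NULL) {
        void *candidate = create();
        if (core_util_atomic_cas_ptr(slot, &current, candidate)) {
            return candidate;   /* this call published the new object */
        }
        destroy(candidate);     /* another caller won the race */
        /* the failed CAS updated 'current' to the published pointer */
    }
    return current;
}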