mbed library sources. Supersedes mbed-src.

Dependents:   LPCXpresso1769_blinky

Fork of mbed-dev by mbed official




mbed_critical.c

/*
 * Copyright (c) 2015-2016, ARM Limited, All Rights Reserved
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "critical.h"

#include "cmsis.h"
#include "mbed_assert.h"

#define EXCLUSIVE_ACCESS (!defined (__CORTEX_M0) && !defined (__CORTEX_M0PLUS))

static volatile uint32_t interrupt_enable_counter = 0;
static volatile bool critical_interrupts_disabled = false;

bool core_util_are_interrupts_enabled(void)
{
#if defined(__CORTEX_A9)
    return ((__get_CPSR() & 0x80) == 0);
#else
    return ((__get_PRIMASK() & 0x1) == 0);
#endif
}

void core_util_critical_section_enter(void)
{
    bool interrupts_disabled = !core_util_are_interrupts_enabled();
    __disable_irq();

    /* Save the interrupt disabled state as it was prior to any nested critical section lock use */
    if (!interrupt_enable_counter) {
        critical_interrupts_disabled = interrupts_disabled;
    }

    /* If the interrupt_enable_counter overflows, or interrupts are enabled while we are already in
       a nested critical section, then something has gone badly wrong, so assert an error.
    */
    MBED_ASSERT(interrupt_enable_counter < UINT32_MAX);
// FIXME
#ifndef FEATURE_UVISOR
    if (interrupt_enable_counter > 0) {
        MBED_ASSERT(interrupts_disabled);
    }
#else
#warning "core_util_critical_section_enter needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */
    interrupt_enable_counter++;
}

void core_util_critical_section_exit(void)
{
    /* If critical_section_enter has not previously been called, do nothing */
    if (interrupt_enable_counter) {

// FIXME
#ifndef FEATURE_UVISOR
        bool interrupts_disabled = !core_util_are_interrupts_enabled(); /* get the current interrupt disabled state */

        MBED_ASSERT(interrupts_disabled); /* Interrupts must be disabled on invoking an exit from a critical section */
#else
#warning "core_util_critical_section_exit needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */

        interrupt_enable_counter--;

        /* Only re-enable interrupts if we are exiting the last of the nested critical sections and
           interrupts were enabled on entry to the first critical section.
        */
        if (!interrupt_enable_counter && !critical_interrupts_disabled) {
            __enable_irq();
        }
    }
}
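
The enter/exit pair above nests: interrupt_enable_counter tracks the nesting depth, only the outermost exit re-enables interrupts, and only if they were enabled before the outermost enter. A minimal usage sketch (illustrative only, not part of this file; shared_counter and counter_bump are hypothetical names):

    static volatile uint32_t shared_counter;

    /* Safe to call from code that may already hold a critical section,
       because enter/exit calls nest via interrupt_enable_counter. */
    static void counter_bump(void)
    {
        core_util_critical_section_enter();
        shared_counter++;   /* protected read-modify-write */
        core_util_critical_section_exit();
    }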

#if EXCLUSIVE_ACCESS

/* Suppress __ldrex and __strex deprecated warnings - "#3731-D: intrinsic is deprecated" */
#if defined (__CC_ARM)
#pragma diag_suppress 3731
#endif

bool core_util_atomic_cas_u8(uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    uint8_t currentValue = __LDREXB((volatile uint8_t*)ptr);
    if (currentValue != *expectedCurrentValue) {
        *expectedCurrentValue = currentValue;
        __CLREX();
        return false;
    }

    return !__STREXB(desiredValue, (volatile uint8_t*)ptr);
}

bool core_util_atomic_cas_u16(uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    uint16_t currentValue = __LDREXH((volatile uint16_t*)ptr);
    if (currentValue != *expectedCurrentValue) {
        *expectedCurrentValue = currentValue;
        __CLREX();
        return false;
    }

    return !__STREXH(desiredValue, (volatile uint16_t*)ptr);
}


bool core_util_atomic_cas_u32(uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    uint32_t currentValue = __LDREXW((volatile uint32_t*)ptr);
    if (currentValue != *expectedCurrentValue) {
        *expectedCurrentValue = currentValue;
        __CLREX();
        return false;
    }

    return !__STREXW(desiredValue, (volatile uint32_t*)ptr);
}

uint8_t core_util_atomic_incr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    do {
        newValue = __LDREXB((volatile uint8_t*)valuePtr) + delta;
    } while (__STREXB(newValue, (volatile uint8_t*)valuePtr));
    return newValue;
}

uint16_t core_util_atomic_incr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    do {
        newValue = __LDREXH((volatile uint16_t*)valuePtr) + delta;
    } while (__STREXH(newValue, (volatile uint16_t*)valuePtr));
    return newValue;
}

uint32_t core_util_atomic_incr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    do {
        newValue = __LDREXW((volatile uint32_t*)valuePtr) + delta;
    } while (__STREXW(newValue, (volatile uint32_t*)valuePtr));
    return newValue;
}


uint8_t core_util_atomic_decr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    do {
        newValue = __LDREXB((volatile uint8_t*)valuePtr) - delta;
    } while (__STREXB(newValue, (volatile uint8_t*)valuePtr));
    return newValue;
}

uint16_t core_util_atomic_decr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    do {
        newValue = __LDREXH((volatile uint16_t*)valuePtr) - delta;
    } while (__STREXH(newValue, (volatile uint16_t*)valuePtr));
    return newValue;
}

uint32_t core_util_atomic_decr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    do {
        newValue = __LDREXW((volatile uint32_t*)valuePtr) - delta;
    } while (__STREXW(newValue, (volatile uint32_t*)valuePtr));
    return newValue;
}

#else

bool core_util_atomic_cas_u8(uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    bool success;
    uint8_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}

bool core_util_atomic_cas_u16(uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    bool success;
    uint16_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}


bool core_util_atomic_cas_u32(uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    bool success;
    uint32_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}


uint8_t core_util_atomic_incr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint16_t core_util_atomic_incr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint32_t core_util_atomic_incr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}


uint8_t core_util_atomic_decr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint16_t core_util_atomic_decr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint32_t core_util_atomic_decr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

#endif
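
Whichever branch is compiled, the CAS functions report failure by writing the value actually observed back through expectedCurrentValue, so callers normally retry in a loop, and the incr/decr helpers return the new value. A hedged sketch of both patterns (illustrative only; atomic_fetch_or_u32 and ref_release are hypothetical helpers, not part of this API):

    /* Atomically OR a mask into *flags, built on the CAS primitive. */
    static uint32_t atomic_fetch_or_u32(uint32_t *flags, uint32_t mask)
    {
        uint32_t expected = *flags;
        /* On failure, expected is refreshed with the observed value; recompute and retry. */
        while (!core_util_atomic_cas_u32(flags, &expected, expected | mask)) {
        }
        return expected;
    }

    /* Drop a reference; returns true when the count reaches zero. */
    static bool ref_release(uint32_t *refcount)
    {
        return core_util_atomic_decr_u32(refcount, 1) == 0;
    }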

bool core_util_atomic_cas_ptr(void **ptr, void **expectedCurrentValue, void *desiredValue) {
    return core_util_atomic_cas_u32(
            (uint32_t *)ptr,
            (uint32_t *)expectedCurrentValue,
            (uint32_t)desiredValue);
}

void *core_util_atomic_incr_ptr(void **valuePtr, ptrdiff_t delta) {
    return (void *)core_util_atomic_incr_u32((uint32_t *)valuePtr, (uint32_t)delta);
}

void *core_util_atomic_decr_ptr(void **valuePtr, ptrdiff_t delta) {
    return (void *)core_util_atomic_decr_u32((uint32_t *)valuePtr, (uint32_t)delta);
}
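
The pointer wrappers above forward to the 32-bit primitives and so assume 32-bit pointers, which is the case on the Cortex-M and Cortex-A targets this library supports. A typical use is a lock-free singly linked list push (illustrative sketch only; node_t and list_push are hypothetical and not part of this API):

    typedef struct node {
        struct node *next;
    } node_t;

    /* Push n onto the list headed at *head without taking a lock. */
    static void list_push(node_t **head, node_t *n)
    {
        node_t *expected = *head;
        do {
            n->next = expected;   /* link against the head we last observed */
        } while (!core_util_atomic_cas_ptr((void **)head, (void **)&expected, n));
    }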