EL4121 Embedded System / mbed-os

mbed_critical.c

/*
 * Copyright (c) 2015-2016, ARM Limited, All Rights Reserved
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* Declare __STDC_LIMIT_MACROS so stdint.h defines UINT32_MAX when using C++ */
#define __STDC_LIMIT_MACROS
#include "platform/mbed_critical.h"

#include "cmsis.h"
#include "platform/mbed_assert.h"
#include "platform/mbed_toolchain.h"

static volatile uint32_t interrupt_enable_counter = 0;
static volatile bool critical_interrupts_disabled = false;

bool core_util_are_interrupts_enabled(void)
{
#if defined(__CORTEX_A9)
    return ((__get_CPSR() & 0x80) == 0);
#else
    return ((__get_PRIMASK() & 0x1) == 0);
#endif
}

bool core_util_is_isr_active(void)
{
#if defined(__CORTEX_A9)
    switch(__get_CPSR() & 0x1FU) {
        case CPSR_M_USR:
        case CPSR_M_SYS:
            return false;
        case CPSR_M_SVC:
        default:
            return true;
    }
#else
    return (__get_IPSR() != 0U);
#endif
}
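
/* Illustrative usage sketch (not part of the original mbed_critical.c; kept out of
 * the build with #if 0): code shared between thread and handler mode can branch on
 * core_util_is_isr_active() to record where it was called from. The two counters
 * below are names introduced for this example only. */
#if 0
#include "platform/mbed_critical.h"
#include <stdint.h>

static uint32_t calls_from_isr;
static uint32_t calls_from_thread;

void record_call_context(void)
{
    if (core_util_is_isr_active()) {
        core_util_atomic_incr_u32(&calls_from_isr, 1);     /* running in an interrupt handler */
    } else {
        core_util_atomic_incr_u32(&calls_from_thread, 1);  /* running in thread mode */
    }
}
#endif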

MBED_WEAK void core_util_critical_section_enter(void)
{
    bool interrupts_disabled = !core_util_are_interrupts_enabled();
    __disable_irq();

    /* Save the interrupt disabled state as it was prior to any nested critical section lock use */
    if (!interrupt_enable_counter) {
        critical_interrupts_disabled = interrupts_disabled;
    }

    /* If the interrupt_enable_counter overflows, or if interrupts are enabled inside a
       nested critical section, something has gone badly wrong, so assert.
    */
    MBED_ASSERT(interrupt_enable_counter < UINT32_MAX);
// FIXME
#ifndef   FEATURE_UVISOR
    if (interrupt_enable_counter > 0) {
        MBED_ASSERT(interrupts_disabled);
    }
#else
#warning "core_util_critical_section_enter needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */
    interrupt_enable_counter++;
}

MBED_WEAK void core_util_critical_section_exit(void)
{
    /* If critical_section_enter has not previously been called, do nothing */
    if (interrupt_enable_counter) {

// FIXME
#ifndef   FEATURE_UVISOR
        bool interrupts_disabled = !core_util_are_interrupts_enabled(); /* get the current interrupt disabled state */

        MBED_ASSERT(interrupts_disabled); /* Interrupts must be disabled on invoking an exit from a critical section */
#else
#warning "core_util_critical_section_exit needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */

        interrupt_enable_counter--;

        /* Only re-enable interrupts if we are exiting the last of the nested critical sections and
           interrupts were enabled on entry to the first critical section.
        */
        if (!interrupt_enable_counter && !critical_interrupts_disabled) {
            __enable_irq();
        }
    }
}
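
/* Illustrative usage sketch (not part of the original mbed_critical.c; kept out of
 * the build with #if 0): enter/exit calls nest, so a helper that takes the lock can
 * safely be called from code that already holds it. Interrupts are only restored by
 * the outermost exit, and only if they were enabled before the first enter. */
#if 0
#include "platform/mbed_critical.h"
#include <stdint.h>

static uint32_t shared_total;

static void add_locked(uint32_t amount)
{
    core_util_critical_section_enter();    /* may be the outer or a nested enter */
    shared_total += amount;
    core_util_critical_section_exit();
}

void add_twice(uint32_t amount)
{
    core_util_critical_section_enter();    /* outer enter: disables interrupts */
    add_locked(amount);                    /* nested enter/exit: interrupts stay disabled */
    add_locked(amount);
    core_util_critical_section_exit();     /* outermost exit: interrupts restored if they were on */
}
#endif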

#if __EXCLUSIVE_ACCESS

/* Suppress __ldrex and __strex deprecated warnings - "#3731-D: intrinsic is deprecated" */
#if defined (__CC_ARM)
#pragma diag_suppress 3731
#endif

bool core_util_atomic_cas_u8(uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    do {
        uint8_t currentValue = __LDREXB((volatile uint8_t*)ptr);
        if (currentValue != *expectedCurrentValue) {
            *expectedCurrentValue = currentValue;
            __CLREX();
            return false;
        }
    } while (__STREXB(desiredValue, (volatile uint8_t*)ptr));
    return true;
}

bool core_util_atomic_cas_u16(uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    do {
        uint16_t currentValue = __LDREXH((volatile uint16_t*)ptr);
        if (currentValue != *expectedCurrentValue) {
            *expectedCurrentValue = currentValue;
            __CLREX();
            return false;
        }
    } while (__STREXH(desiredValue, (volatile uint16_t*)ptr));
    return true;
}


bool core_util_atomic_cas_u32(uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    do {
        uint32_t currentValue = __LDREXW((volatile uint32_t*)ptr);
        if (currentValue != *expectedCurrentValue) {
            *expectedCurrentValue = currentValue;
            __CLREX();
            return false;
        }
    } while (__STREXW(desiredValue, (volatile uint32_t*)ptr));
    return true;
}
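
/* Illustrative usage sketch (not part of the original mbed_critical.c; kept out of
 * the build with #if 0): the CAS functions update *expectedCurrentValue on failure,
 * so the usual pattern is a read-modify-CAS retry loop. Here a counter is incremented
 * only while it is below a caller-supplied limit. */
#if 0
#include "platform/mbed_critical.h"
#include <stdbool.h>
#include <stdint.h>

bool increment_if_below(uint32_t *counter, uint32_t limit)
{
    uint32_t current = *counter;
    do {
        if (current >= limit) {
            return false;                   /* limit reached, leave the counter unchanged */
        }
        /* On CAS failure, current is refreshed with the value observed in memory. */
    } while (!core_util_atomic_cas_u32(counter, &current, current + 1));
    return true;
}
#endif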

uint8_t core_util_atomic_incr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    do {
        newValue = __LDREXB((volatile uint8_t*)valuePtr) + delta;
    } while (__STREXB(newValue, (volatile uint8_t*)valuePtr));
    return newValue;
}

uint16_t core_util_atomic_incr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    do {
        newValue = __LDREXH((volatile uint16_t*)valuePtr) + delta;
    } while (__STREXH(newValue, (volatile uint16_t*)valuePtr));
    return newValue;
}

uint32_t core_util_atomic_incr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    do {
        newValue = __LDREXW((volatile uint32_t*)valuePtr) + delta;
    } while (__STREXW(newValue, (volatile uint32_t*)valuePtr));
    return newValue;
}


uint8_t core_util_atomic_decr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    do {
        newValue = __LDREXB((volatile uint8_t*)valuePtr) - delta;
    } while (__STREXB(newValue, (volatile uint8_t*)valuePtr));
    return newValue;
}

uint16_t core_util_atomic_decr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    do {
        newValue = __LDREXH((volatile uint16_t*)valuePtr) - delta;
    } while (__STREXH(newValue, (volatile uint16_t*)valuePtr));
    return newValue;
}

uint32_t core_util_atomic_decr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    do {
        newValue = __LDREXW((volatile uint32_t*)valuePtr) - delta;
    } while (__STREXW(newValue, (volatile uint32_t*)valuePtr));
    return newValue;
}
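
/* Illustrative usage sketch (not part of the original mbed_critical.c; kept out of
 * the build with #if 0): because the increment/decrement helpers return the value
 * after the update, they are convenient for reference counting. The resource_t type
 * and free_resource() call are hypothetical names used only for this example. */
#if 0
#include "platform/mbed_critical.h"
#include <stdint.h>

typedef struct {
    uint32_t refcount;
    /* ... resource data ... */
} resource_t;

void resource_acquire(resource_t *res)
{
    core_util_atomic_incr_u32(&res->refcount, 1);
}

void resource_release(resource_t *res)
{
    if (core_util_atomic_decr_u32(&res->refcount, 1) == 0) {
        free_resource(res);    /* hypothetical cleanup once the last reference is dropped */
    }
}
#endif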

#else

bool core_util_atomic_cas_u8(uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    bool success;
    uint8_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}

bool core_util_atomic_cas_u16(uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    bool success;
    uint16_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}


bool core_util_atomic_cas_u32(uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    bool success;
    uint32_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}


uint8_t core_util_atomic_incr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint16_t core_util_atomic_incr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint32_t core_util_atomic_incr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}


uint8_t core_util_atomic_decr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint16_t core_util_atomic_decr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint32_t core_util_atomic_decr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

#endif


bool core_util_atomic_cas_ptr(void **ptr, void **expectedCurrentValue, void *desiredValue) {
    return core_util_atomic_cas_u32(
            (uint32_t *)ptr,
            (uint32_t *)expectedCurrentValue,
            (uint32_t)desiredValue);
}

void *core_util_atomic_incr_ptr(void **valuePtr, ptrdiff_t delta) {
    return (void *)core_util_atomic_incr_u32((uint32_t *)valuePtr, (uint32_t)delta);
}

void *core_util_atomic_decr_ptr(void **valuePtr, ptrdiff_t delta) {
    return (void *)core_util_atomic_decr_u32((uint32_t *)valuePtr, (uint32_t)delta);
}
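
/* Illustrative usage sketch (not part of the original mbed_critical.c; kept out of
 * the build with #if 0): core_util_atomic_cas_ptr() can implement a simple lock-free
 * push onto a singly linked list. The node_t type and list_head variable are names
 * introduced for this example only. */
#if 0
#include "platform/mbed_critical.h"

typedef struct node {
    struct node *next;
    int payload;
} node_t;

static node_t *list_head;

void list_push(node_t *node)
{
    void *expected = list_head;
    do {
        node->next = (node_t *)expected;
        /* Retry until the head has not changed between reading it and swapping it in. */
    } while (!core_util_atomic_cas_ptr((void **)&list_head, &expected, node));
}
#endif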