Rtos API example

mbed_critical.c

/*
 * Copyright (c) 2015-2016, ARM Limited, All Rights Reserved
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* Declare __STDC_LIMIT_MACROS so stdint.h defines UINT32_MAX when using C++ */
#define __STDC_LIMIT_MACROS
#include "platform/mbed_critical.h"

#include "cmsis.h"
#include "platform/mbed_assert.h"
#include "platform/mbed_toolchain.h"

static volatile uint32_t interrupt_enable_counter = 0;
static volatile bool critical_interrupts_disabled = false;

bool core_util_are_interrupts_enabled(void)
{
#if defined(__CORTEX_A9)
    return ((__get_CPSR() & 0x80) == 0);
#else
    return ((__get_PRIMASK() & 0x1) == 0);
#endif
}

bool core_util_is_isr_active(void)
{
#if defined(__CORTEX_A9)
    switch (__get_CPSR() & 0x1FU) {
        case CPSR_M_USR:
        case CPSR_M_SYS:
            return false;
        case CPSR_M_SVC:
        default:
            return true;
    }
#else
    return (__get_IPSR() != 0U);
#endif
}

MBED_WEAK void core_util_critical_section_enter(void)
{
    bool interrupts_disabled = !core_util_are_interrupts_enabled();
    __disable_irq();

    /* Save the interrupt disabled state as it was prior to any nested critical section lock use */
    if (!interrupt_enable_counter) {
        critical_interrupts_disabled = interrupts_disabled;
    }

    /* If the interrupt_enable_counter overflows, or if we are in a nested critical section and
       interrupts are enabled, then something has gone badly wrong, so assert an error.
    */
    MBED_ASSERT(interrupt_enable_counter < UINT32_MAX);
// FIXME
#ifndef FEATURE_UVISOR
    if (interrupt_enable_counter > 0) {
        MBED_ASSERT(interrupts_disabled);
    }
#else
#warning "core_util_critical_section_enter needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */
    interrupt_enable_counter++;
}

MBED_WEAK void core_util_critical_section_exit(void)
{
    /* If critical_section_enter has not previously been called, do nothing */
    if (interrupt_enable_counter) {

// FIXME
#ifndef FEATURE_UVISOR
        bool interrupts_disabled = !core_util_are_interrupts_enabled(); /* get the current interrupt disabled state */

        MBED_ASSERT(interrupts_disabled); /* Interrupts must be disabled on invoking an exit from a critical section */
#else
#warning "core_util_critical_section_exit needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */

        interrupt_enable_counter--;

        /* Only re-enable interrupts if we are exiting the last of the nested critical sections and
           interrupts were enabled on entry to the first critical section.
        */
        if (!interrupt_enable_counter && !critical_interrupts_disabled) {
            __enable_irq();
        }
    }
}
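
/* Illustrative sketch (not part of the original file): the enter/exit pair
 * nests, so interrupts are only re-enabled when the outermost section exits.
 * "shared_counter" is a hypothetical variable used only for illustration.
 *
 *     void increment_shared_counter(void)
 *     {
 *         core_util_critical_section_enter();     // outer section: IRQs masked
 *         shared_counter++;
 *         core_util_critical_section_enter();     // nested enter is allowed
 *         core_util_critical_section_exit();      // IRQs stay masked here
 *         core_util_critical_section_exit();      // IRQs restored (if enabled on first entry)
 *     }
 */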

#if __EXCLUSIVE_ACCESS

/* Suppress __ldrex and __strex deprecated warnings - "#3731-D: intrinsic is deprecated" */
#if defined (__CC_ARM)
#pragma diag_suppress 3731
#endif

bool core_util_atomic_cas_u8(uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    uint8_t currentValue = __LDREXB((volatile uint8_t*)ptr);
    if (currentValue != *expectedCurrentValue) {
        *expectedCurrentValue = currentValue;
        __CLREX();
        return false;
    }

    return !__STREXB(desiredValue, (volatile uint8_t*)ptr);
}

bool core_util_atomic_cas_u16(uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    uint16_t currentValue = __LDREXH((volatile uint16_t*)ptr);
    if (currentValue != *expectedCurrentValue) {
        *expectedCurrentValue = currentValue;
        __CLREX();
        return false;
    }

    return !__STREXH(desiredValue, (volatile uint16_t*)ptr);
}


bool core_util_atomic_cas_u32(uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    uint32_t currentValue = __LDREXW((volatile uint32_t*)ptr);
    if (currentValue != *expectedCurrentValue) {
        *expectedCurrentValue = currentValue;
        __CLREX();
        return false;
    }

    return !__STREXW(desiredValue, (volatile uint32_t*)ptr);
}
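
/* Illustrative sketch (not part of the original file): because the exclusive
 * store can fail even when the value matched (for example after an interrupt
 * clears the exclusive monitor), callers are expected to retry the CAS in a
 * loop. A lock-free add built on this primitive might look like the following;
 * "atomic_add_u32" is a hypothetical helper used only for illustration.
 *
 *     void atomic_add_u32(uint32_t *value, uint32_t delta)
 *     {
 *         uint32_t expected = *value;
 *         while (!core_util_atomic_cas_u32(value, &expected, expected + delta)) {
 *             // on failure, expected has been refreshed with the current value; retry
 *         }
 *     }
 */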

uint8_t core_util_atomic_incr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    do {
        newValue = __LDREXB((volatile uint8_t*)valuePtr) + delta;
    } while (__STREXB(newValue, (volatile uint8_t*)valuePtr));
    return newValue;
}

uint16_t core_util_atomic_incr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    do {
        newValue = __LDREXH((volatile uint16_t*)valuePtr) + delta;
    } while (__STREXH(newValue, (volatile uint16_t*)valuePtr));
    return newValue;
}

uint32_t core_util_atomic_incr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    do {
        newValue = __LDREXW((volatile uint32_t*)valuePtr) + delta;
    } while (__STREXW(newValue, (volatile uint32_t*)valuePtr));
    return newValue;
}


uint8_t core_util_atomic_decr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    do {
        newValue = __LDREXB((volatile uint8_t*)valuePtr) - delta;
    } while (__STREXB(newValue, (volatile uint8_t*)valuePtr));
    return newValue;
}

uint16_t core_util_atomic_decr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    do {
        newValue = __LDREXH((volatile uint16_t*)valuePtr) - delta;
    } while (__STREXH(newValue, (volatile uint16_t*)valuePtr));
    return newValue;
}

uint32_t core_util_atomic_decr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    do {
        newValue = __LDREXW((volatile uint32_t*)valuePtr) - delta;
    } while (__STREXW(newValue, (volatile uint32_t*)valuePtr));
    return newValue;
}

#else

bool core_util_atomic_cas_u8(uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    bool success;
    uint8_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}

bool core_util_atomic_cas_u16(uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    bool success;
    uint16_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}


bool core_util_atomic_cas_u32(uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    bool success;
    uint32_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}


uint8_t core_util_atomic_incr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint16_t core_util_atomic_incr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint32_t core_util_atomic_incr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}


uint8_t core_util_atomic_decr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint16_t core_util_atomic_decr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint32_t core_util_atomic_decr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

#endif


bool core_util_atomic_cas_ptr(void **ptr, void **expectedCurrentValue, void *desiredValue) {
    return core_util_atomic_cas_u32(
            (uint32_t *)ptr,
            (uint32_t *)expectedCurrentValue,
            (uint32_t)desiredValue);
}

void *core_util_atomic_incr_ptr(void **valuePtr, ptrdiff_t delta) {
    return (void *)core_util_atomic_incr_u32((uint32_t *)valuePtr, (uint32_t)delta);
}

void *core_util_atomic_decr_ptr(void **valuePtr, ptrdiff_t delta) {
    return (void *)core_util_atomic_decr_u32((uint32_t *)valuePtr, (uint32_t)delta);
}
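
The pointer variants above simply forward to the 32-bit helpers, which relies on pointers being 32 bits wide on the Cortex targets this file supports. As a minimal sketch of how the pointer CAS could be used, the following (hypothetical, not part of mbed) lock-free push onto a singly linked list retries until the head swap succeeds; the node_t type and list_push helper are assumptions for illustration only.

    typedef struct node {
        struct node *next;
    } node_t;                                     /* hypothetical node type */

    void list_push(node_t **head, node_t *n)      /* hypothetical helper */
    {
        node_t *old_head = *head;
        do {
            n->next = old_head;                   /* link new node in front of current head */
        } while (!core_util_atomic_cas_ptr((void **)head,
                                           (void **)&old_head,
                                           n));   /* retry if another context changed *head */
    }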