Mouse code for the MacroRat

Dependencies: ITG3200, QEI


mbed_critical.c

/*
 * Copyright (c) 2015-2016, ARM Limited, All Rights Reserved
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* Declare __STDC_LIMIT_MACROS so stdint.h defines UINT32_MAX when using C++ */
#define __STDC_LIMIT_MACROS
#include "platform/mbed_critical.h"

#include "cmsis.h"
#include "platform/mbed_assert.h"
#include "platform/mbed_toolchain.h"

#define EXCLUSIVE_ACCESS (!defined (__CORTEX_M0) && !defined (__CORTEX_M0PLUS))

static volatile uint32_t interrupt_enable_counter = 0;
static volatile bool critical_interrupts_disabled = false;

bool core_util_are_interrupts_enabled(void)
{
#if defined(__CORTEX_A9)
    return ((__get_CPSR() & 0x80) == 0);
#else
    return ((__get_PRIMASK() & 0x1) == 0);
#endif
}

MBED_WEAK void core_util_critical_section_enter(void)
{
    bool interrupts_disabled = !core_util_are_interrupts_enabled();
    __disable_irq();

    /* Save the interrupt disabled state as it was prior to any nested critical section lock use */
    if (!interrupt_enable_counter) {
        critical_interrupts_disabled = interrupts_disabled;
    }

    /* If the interrupt_enable_counter overflows, or we are in a nested critical section while
       interrupts are enabled, something has gone badly wrong, so assert.
    */
    MBED_ASSERT(interrupt_enable_counter < UINT32_MAX);
// FIXME
#ifndef FEATURE_UVISOR
    if (interrupt_enable_counter > 0) {
        MBED_ASSERT(interrupts_disabled);
    }
#else
#warning "core_util_critical_section_enter needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */
    interrupt_enable_counter++;
}

MBED_WEAK void core_util_critical_section_exit(void)
{
    /* If critical_section_enter has not previously been called, do nothing */
    if (interrupt_enable_counter) {

// FIXME
#ifndef FEATURE_UVISOR
        bool interrupts_disabled = !core_util_are_interrupts_enabled(); /* get the current interrupt disabled state */

        MBED_ASSERT(interrupts_disabled); /* Interrupts must be disabled on invoking an exit from a critical section */
#else
#warning "core_util_critical_section_exit needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */

        interrupt_enable_counter--;

        /* Only re-enable interrupts if we are exiting the last of the nested critical sections and
           interrupts were enabled on entry to the first critical section.
        */
        if (!interrupt_enable_counter && !critical_interrupts_disabled) {
            __enable_irq();
        }
    }
}
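
Critical sections nest: interrupts are re-enabled only by the outermost exit, and only if they were enabled before the first enter. A minimal usage sketch (the FIFO names below are illustrative, not part of this file):

/* Illustrative only: make a multi-word update appear atomic to ISRs. */
static uint32_t fifo[16];
static uint32_t fifo_head;
static uint32_t fifo_count;

void fifo_push(uint32_t value)
{
    core_util_critical_section_enter();   /* safe even if interrupts are already disabled */
    fifo[fifo_head] = value;
    fifo_head = (fifo_head + 1) % 16;
    fifo_count++;
    core_util_critical_section_exit();    /* outermost exit re-enables interrupts if they
                                             were enabled before the first enter */
}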

#if EXCLUSIVE_ACCESS

/* Suppress __ldrex and __strex deprecated warnings - "#3731-D: intrinsic is deprecated" */
#if defined (__CC_ARM)
#pragma diag_suppress 3731
#endif

bool core_util_atomic_cas_u8(uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    uint8_t currentValue = __LDREXB((volatile uint8_t*)ptr);
    if (currentValue != *expectedCurrentValue) {
        *expectedCurrentValue = currentValue;
        __CLREX();
        return false;
    }

    return !__STREXB(desiredValue, (volatile uint8_t*)ptr);
}

bool core_util_atomic_cas_u16(uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    uint16_t currentValue = __LDREXH((volatile uint16_t*)ptr);
    if (currentValue != *expectedCurrentValue) {
        *expectedCurrentValue = currentValue;
        __CLREX();
        return false;
    }

    return !__STREXH(desiredValue, (volatile uint16_t*)ptr);
}


bool core_util_atomic_cas_u32(uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    uint32_t currentValue = __LDREXW((volatile uint32_t*)ptr);
    if (currentValue != *expectedCurrentValue) {
        *expectedCurrentValue = currentValue;
        __CLREX();
        return false;
    }

    return !__STREXW(desiredValue, (volatile uint32_t*)ptr);
}

uint8_t core_util_atomic_incr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    do {
        newValue = __LDREXB((volatile uint8_t*)valuePtr) + delta;
    } while (__STREXB(newValue, (volatile uint8_t*)valuePtr));
    return newValue;
}

uint16_t core_util_atomic_incr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    do {
        newValue = __LDREXH((volatile uint16_t*)valuePtr) + delta;
    } while (__STREXH(newValue, (volatile uint16_t*)valuePtr));
    return newValue;
}

uint32_t core_util_atomic_incr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    do {
        newValue = __LDREXW((volatile uint32_t*)valuePtr) + delta;
    } while (__STREXW(newValue, (volatile uint32_t*)valuePtr));
    return newValue;
}


uint8_t core_util_atomic_decr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    do {
        newValue = __LDREXB((volatile uint8_t*)valuePtr) - delta;
    } while (__STREXB(newValue, (volatile uint8_t*)valuePtr));
    return newValue;
}

uint16_t core_util_atomic_decr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    do {
        newValue = __LDREXH((volatile uint16_t*)valuePtr) - delta;
    } while (__STREXH(newValue, (volatile uint16_t*)valuePtr));
    return newValue;
}

uint32_t core_util_atomic_decr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    do {
        newValue = __LDREXW((volatile uint32_t*)valuePtr) - delta;
    } while (__STREXW(newValue, (volatile uint32_t*)valuePtr));
    return newValue;
}

#else
/* No exclusive-access instructions (Cortex-M0/M0+): implement the atomics with critical sections */

bool core_util_atomic_cas_u8(uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    bool success;
    uint8_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}

bool core_util_atomic_cas_u16(uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    bool success;
    uint16_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}


bool core_util_atomic_cas_u32(uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    bool success;
    uint32_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}


uint8_t core_util_atomic_incr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint16_t core_util_atomic_incr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint32_t core_util_atomic_incr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}


uint8_t core_util_atomic_decr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint16_t core_util_atomic_decr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint32_t core_util_atomic_decr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

#endif
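
Both implementations share the same CAS contract: on failure, *expectedCurrentValue is updated with the value actually read from *ptr, so the caller can simply retry. A typical retry loop looks like this (illustrative sketch; record_max is not part of this file):

/* Illustrative only: atomically track a running maximum. */
void record_max(uint32_t *max, uint32_t sample)
{
    uint32_t current = *max;
    while (sample > current) {
        if (core_util_atomic_cas_u32(max, &current, sample)) {
            break;    /* sample became the new maximum */
        }
        /* CAS failed: current now holds the latest value, so re-test and retry */
    }
}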


bool core_util_atomic_cas_ptr(void **ptr, void **expectedCurrentValue, void *desiredValue) {
    return core_util_atomic_cas_u32(
            (uint32_t *)ptr,
            (uint32_t *)expectedCurrentValue,
            (uint32_t)desiredValue);
}

void *core_util_atomic_incr_ptr(void **valuePtr, ptrdiff_t delta) {
    return (void *)core_util_atomic_incr_u32((uint32_t *)valuePtr, (uint32_t)delta);
}

void *core_util_atomic_decr_ptr(void **valuePtr, ptrdiff_t delta) {
    return (void *)core_util_atomic_decr_u32((uint32_t *)valuePtr, (uint32_t)delta);
}
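
The pointer variants simply forward to the 32-bit functions, which is valid here because pointers on these Cortex targets are 32 bits wide. core_util_atomic_cas_ptr is enough for simple lock-free structures, for example a Treiber-style stack push (illustrative sketch; node and stack_top are not part of this file):

/* Illustrative only: lock-free push onto a singly linked stack. */
struct node {
    struct node *next;
    uint32_t payload;
};

static struct node *stack_top;

void stack_push(struct node *n)
{
    void *old_top = stack_top;
    /* On CAS failure, old_top is refreshed with the current top, so the
       link is rebuilt and the store retried. */
    do {
        n->next = (struct node *)old_top;
    } while (!core_util_atomic_cas_ptr((void **)&stack_top, &old_top, n));
}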