Important changes to repositories hosted on mbed.com
Mbed hosted mercurial repositories are deprecated and are due to be permanently deleted in July 2026.
To keep a copy of this software download the repository Zip archive or clone locally using Mercurial.
It is also possible to export all your personal repositories from the account settings page.
Fork of gr-peach-opencv-project-sd-card.
mbed_critical.c
00001 /* 00002 * Copyright (c) 2015-2016, ARM Limited, All Rights Reserved 00003 * SPDX-License-Identifier: Apache-2.0 00004 * 00005 * Licensed under the Apache License, Version 2.0 (the "License"); you may 00006 * not use this file except in compliance with the License. 00007 * You may obtain a copy of the License at 00008 * 00009 * http://www.apache.org/licenses/LICENSE-2.0 00010 * 00011 * Unless required by applicable law or agreed to in writing, software 00012 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 00013 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 00014 * See the License for the specific language governing permissions and 00015 * limitations under the License. 00016 */ 00017 00018 /* Declare __STDC_LIMIT_MACROS so stdint.h defines UINT32_MAX when using C++ */ 00019 #define __STDC_LIMIT_MACROS 00020 #include "platform/mbed_critical.h" 00021 00022 #include "cmsis.h" 00023 #include "platform/mbed_assert.h" 00024 #include "platform/mbed_toolchain.h" 00025 00026 #define EXCLUSIVE_ACCESS (!defined (__CORTEX_M0) && !defined (__CORTEX_M0PLUS)) 00027 00028 static volatile uint32_t interrupt_enable_counter = 0; 00029 static volatile bool critical_interrupts_disabled = false; 00030 00031 bool core_util_are_interrupts_enabled(void) 00032 { 00033 #if defined(__CORTEX_A9) 00034 return ((__get_CPSR() & 0x80) == 0); 00035 #else 00036 return ((__get_PRIMASK() & 0x1) == 0); 00037 #endif 00038 } 00039 00040 bool core_util_is_isr_active(void) 00041 { 00042 #if defined(__CORTEX_A9) 00043 switch(__get_CPSR() & 0x1FU) { 00044 case MODE_USR: 00045 case MODE_SYS: 00046 return false; 00047 case MODE_SVC: 00048 default: 00049 return true; 00050 } 00051 #else 00052 return (__get_IPSR() != 0U); 00053 #endif 00054 } 00055 00056 MBED_WEAK void core_util_critical_section_enter(void) 00057 { 00058 bool interrupts_disabled = !core_util_are_interrupts_enabled(); 00059 __disable_irq(); 00060 00061 /* Save the interrupt disabled state as 
it was prior to any nested critical section lock use */ 00062 if (!interrupt_enable_counter) { 00063 critical_interrupts_disabled = interrupts_disabled; 00064 } 00065 00066 /* If the interrupt_enable_counter overflows or we are in a nested critical section and interrupts 00067 are enabled, then something has gone badly wrong thus assert an error. 00068 */ 00069 MBED_ASSERT(interrupt_enable_counter < UINT32_MAX); 00070 // FIXME 00071 #ifndef FEATURE_UVISOR 00072 if (interrupt_enable_counter > 0) { 00073 MBED_ASSERT(interrupts_disabled); 00074 } 00075 #else 00076 #warning "core_util_critical_section_enter needs fixing to work from unprivileged code" 00077 #endif /* FEATURE_UVISOR */ 00078 interrupt_enable_counter++; 00079 } 00080 00081 MBED_WEAK void core_util_critical_section_exit(void) 00082 { 00083 /* If critical_section_enter has not previously been called, do nothing */ 00084 if (interrupt_enable_counter) { 00085 00086 // FIXME 00087 #ifndef FEATURE_UVISOR 00088 bool interrupts_disabled = !core_util_are_interrupts_enabled(); /* get the current interrupt disabled state */ 00089 00090 MBED_ASSERT(interrupts_disabled); /* Interrupts must be disabled on invoking an exit from a critical section */ 00091 #else 00092 #warning "core_util_critical_section_exit needs fixing to work from unprivileged code" 00093 #endif /* FEATURE_UVISOR */ 00094 00095 interrupt_enable_counter--; 00096 00097 /* Only re-enable interrupts if we are exiting the last of the nested critical sections and 00098 interrupts were enabled on entry to the first critical section. 
00099 */ 00100 if (!interrupt_enable_counter && !critical_interrupts_disabled) { 00101 __enable_irq(); 00102 } 00103 } 00104 } 00105 00106 #if EXCLUSIVE_ACCESS 00107 00108 /* Supress __ldrex and __strex deprecated warnings - "#3731-D: intrinsic is deprecated" */ 00109 #if defined (__CC_ARM) 00110 #pragma diag_suppress 3731 00111 #endif 00112 00113 bool core_util_atomic_cas_u8(uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue) 00114 { 00115 uint8_t currentValue = __LDREXB((volatile uint8_t*)ptr); 00116 if (currentValue != *expectedCurrentValue) { 00117 *expectedCurrentValue = currentValue; 00118 __CLREX(); 00119 return false; 00120 } 00121 00122 return !__STREXB(desiredValue, (volatile uint8_t*)ptr); 00123 } 00124 00125 bool core_util_atomic_cas_u16(uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue) 00126 { 00127 uint16_t currentValue = __LDREXH((volatile uint16_t*)ptr); 00128 if (currentValue != *expectedCurrentValue) { 00129 *expectedCurrentValue = currentValue; 00130 __CLREX(); 00131 return false; 00132 } 00133 00134 return !__STREXH(desiredValue, (volatile uint16_t*)ptr); 00135 } 00136 00137 00138 bool core_util_atomic_cas_u32(uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue) 00139 { 00140 uint32_t currentValue = __LDREXW((volatile uint32_t*)ptr); 00141 if (currentValue != *expectedCurrentValue) { 00142 *expectedCurrentValue = currentValue; 00143 __CLREX(); 00144 return false; 00145 } 00146 00147 return !__STREXW(desiredValue, (volatile uint32_t*)ptr); 00148 } 00149 00150 uint8_t core_util_atomic_incr_u8(uint8_t *valuePtr, uint8_t delta) 00151 { 00152 uint8_t newValue; 00153 do { 00154 newValue = __LDREXB((volatile uint8_t*)valuePtr) + delta; 00155 } while (__STREXB(newValue, (volatile uint8_t*)valuePtr)); 00156 return newValue; 00157 } 00158 00159 uint16_t core_util_atomic_incr_u16(uint16_t *valuePtr, uint16_t delta) 00160 { 00161 uint16_t newValue; 00162 do { 00163 newValue = __LDREXH((volatile 
uint16_t*)valuePtr) + delta; 00164 } while (__STREXH(newValue, (volatile uint16_t*)valuePtr)); 00165 return newValue; 00166 } 00167 00168 uint32_t core_util_atomic_incr_u32(uint32_t *valuePtr, uint32_t delta) 00169 { 00170 uint32_t newValue; 00171 do { 00172 newValue = __LDREXW((volatile uint32_t*)valuePtr) + delta; 00173 } while (__STREXW(newValue, (volatile uint32_t*)valuePtr)); 00174 return newValue; 00175 } 00176 00177 00178 uint8_t core_util_atomic_decr_u8(uint8_t *valuePtr, uint8_t delta) 00179 { 00180 uint8_t newValue; 00181 do { 00182 newValue = __LDREXB((volatile uint8_t*)valuePtr) - delta; 00183 } while (__STREXB(newValue, (volatile uint8_t*)valuePtr)); 00184 return newValue; 00185 } 00186 00187 uint16_t core_util_atomic_decr_u16(uint16_t *valuePtr, uint16_t delta) 00188 { 00189 uint16_t newValue; 00190 do { 00191 newValue = __LDREXH((volatile uint16_t*)valuePtr) - delta; 00192 } while (__STREXH(newValue, (volatile uint16_t*)valuePtr)); 00193 return newValue; 00194 } 00195 00196 uint32_t core_util_atomic_decr_u32(uint32_t *valuePtr, uint32_t delta) 00197 { 00198 uint32_t newValue; 00199 do { 00200 newValue = __LDREXW((volatile uint32_t*)valuePtr) - delta; 00201 } while (__STREXW(newValue, (volatile uint32_t*)valuePtr)); 00202 return newValue; 00203 } 00204 00205 #else 00206 00207 bool core_util_atomic_cas_u8(uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue) 00208 { 00209 bool success; 00210 uint8_t currentValue; 00211 core_util_critical_section_enter(); 00212 currentValue = *ptr; 00213 if (currentValue == *expectedCurrentValue) { 00214 *ptr = desiredValue; 00215 success = true; 00216 } else { 00217 *expectedCurrentValue = currentValue; 00218 success = false; 00219 } 00220 core_util_critical_section_exit(); 00221 return success; 00222 } 00223 00224 bool core_util_atomic_cas_u16(uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue) 00225 { 00226 bool success; 00227 uint16_t currentValue; 00228 
core_util_critical_section_enter(); 00229 currentValue = *ptr; 00230 if (currentValue == *expectedCurrentValue) { 00231 *ptr = desiredValue; 00232 success = true; 00233 } else { 00234 *expectedCurrentValue = currentValue; 00235 success = false; 00236 } 00237 core_util_critical_section_exit(); 00238 return success; 00239 } 00240 00241 00242 bool core_util_atomic_cas_u32(uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue) 00243 { 00244 bool success; 00245 uint32_t currentValue; 00246 core_util_critical_section_enter(); 00247 currentValue = *ptr; 00248 if (currentValue == *expectedCurrentValue) { 00249 *ptr = desiredValue; 00250 success = true; 00251 } else { 00252 *expectedCurrentValue = currentValue; 00253 success = false; 00254 } 00255 core_util_critical_section_exit(); 00256 return success; 00257 } 00258 00259 00260 uint8_t core_util_atomic_incr_u8(uint8_t *valuePtr, uint8_t delta) 00261 { 00262 uint8_t newValue; 00263 core_util_critical_section_enter(); 00264 newValue = *valuePtr + delta; 00265 *valuePtr = newValue; 00266 core_util_critical_section_exit(); 00267 return newValue; 00268 } 00269 00270 uint16_t core_util_atomic_incr_u16(uint16_t *valuePtr, uint16_t delta) 00271 { 00272 uint16_t newValue; 00273 core_util_critical_section_enter(); 00274 newValue = *valuePtr + delta; 00275 *valuePtr = newValue; 00276 core_util_critical_section_exit(); 00277 return newValue; 00278 } 00279 00280 uint32_t core_util_atomic_incr_u32(uint32_t *valuePtr, uint32_t delta) 00281 { 00282 uint32_t newValue; 00283 core_util_critical_section_enter(); 00284 newValue = *valuePtr + delta; 00285 *valuePtr = newValue; 00286 core_util_critical_section_exit(); 00287 return newValue; 00288 } 00289 00290 00291 uint8_t core_util_atomic_decr_u8(uint8_t *valuePtr, uint8_t delta) 00292 { 00293 uint8_t newValue; 00294 core_util_critical_section_enter(); 00295 newValue = *valuePtr - delta; 00296 *valuePtr = newValue; 00297 core_util_critical_section_exit(); 00298 return newValue; 
00299 } 00300 00301 uint16_t core_util_atomic_decr_u16(uint16_t *valuePtr, uint16_t delta) 00302 { 00303 uint16_t newValue; 00304 core_util_critical_section_enter(); 00305 newValue = *valuePtr - delta; 00306 *valuePtr = newValue; 00307 core_util_critical_section_exit(); 00308 return newValue; 00309 } 00310 00311 uint32_t core_util_atomic_decr_u32(uint32_t *valuePtr, uint32_t delta) 00312 { 00313 uint32_t newValue; 00314 core_util_critical_section_enter(); 00315 newValue = *valuePtr - delta; 00316 *valuePtr = newValue; 00317 core_util_critical_section_exit(); 00318 return newValue; 00319 } 00320 00321 #endif 00322 00323 00324 bool core_util_atomic_cas_ptr(void **ptr, void **expectedCurrentValue, void *desiredValue) { 00325 return core_util_atomic_cas_u32( 00326 (uint32_t *)ptr, 00327 (uint32_t *)expectedCurrentValue, 00328 (uint32_t)desiredValue); 00329 } 00330 00331 void *core_util_atomic_incr_ptr(void **valuePtr, ptrdiff_t delta) { 00332 return (void *)core_util_atomic_incr_u32((uint32_t *)valuePtr, (uint32_t)delta); 00333 } 00334 00335 void *core_util_atomic_decr_ptr(void **valuePtr, ptrdiff_t delta) { 00336 return (void *)core_util_atomic_decr_u32((uint32_t *)valuePtr, (uint32_t)delta); 00337 } 00338 00339
Generated on Tue Jul 12 2022 14:47:25 by Doxygen.
