mbed library sources. Supersedes mbed-src.
Fork of mbed-dev.
Embed:
(wiki syntax)
Show/hide line numbers
core_cmInstr.h
Go to the documentation of this file.
00001 /**************************************************************************//** 00002 * @file core_cmInstr.h 00003 * @brief CMSIS Cortex-M Core Instruction Access Header File 00004 * @version V4.10 00005 * @date 18. March 2015 00006 * 00007 * @note 00008 * 00009 ******************************************************************************/ 00010 /* Copyright (c) 2009 - 2014 ARM LIMITED 00011 00012 All rights reserved. 00013 Redistribution and use in source and binary forms, with or without 00014 modification, are permitted provided that the following conditions are met: 00015 - Redistributions of source code must retain the above copyright 00016 notice, this list of conditions and the following disclaimer. 00017 - Redistributions in binary form must reproduce the above copyright 00018 notice, this list of conditions and the following disclaimer in the 00019 documentation and/or other materials provided with the distribution. 00020 - Neither the name of ARM nor the names of its contributors may be used 00021 to endorse or promote products derived from this software without 00022 specific prior written permission. 00023 * 00024 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 00025 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 00026 IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 00027 ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE 00028 LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 00029 CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 00030 SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 00031 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 00032 CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 00033 ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 00034 POSSIBILITY OF SUCH DAMAGE. 
00035 ---------------------------------------------------------------------------*/ 00036 00037 00038 #ifndef __CORE_CMINSTR_H 00039 #define __CORE_CMINSTR_H 00040 00041 00042 /* ########################## Core Instruction Access ######################### */ 00043 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface 00044 Access to dedicated instructions 00045 @{ 00046 */ 00047 00048 #if defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/ 00049 /* ARM armcc specific functions */ 00050 00051 #if (__ARMCC_VERSION < 400677) 00052 #error "Please use ARM Compiler Toolchain V4.0.677 or later!" 00053 #endif 00054 00055 00056 /** \brief No Operation 00057 00058 No Operation does nothing. This instruction can be used for code alignment purposes. 00059 */ 00060 #define __NOP __nop 00061 00062 00063 /** \brief Wait For Interrupt 00064 00065 Wait For Interrupt is a hint instruction that suspends execution 00066 until one of a number of events occurs. 00067 */ 00068 #define __WFI __wfi 00069 00070 00071 /** \brief Wait For Event 00072 00073 Wait For Event is a hint instruction that permits the processor to enter 00074 a low-power state until one of a number of events occurs. 00075 */ 00076 #define __WFE __wfe 00077 00078 00079 /** \brief Send Event 00080 00081 Send Event is a hint instruction. It causes an event to be signaled to the CPU. 00082 */ 00083 #define __SEV __sev 00084 00085 00086 /** \brief Instruction Synchronization Barrier 00087 00088 Instruction Synchronization Barrier flushes the pipeline in the processor, 00089 so that all instructions following the ISB are fetched from cache or 00090 memory, after the instruction has been completed. 00091 */ 00092 #define __ISB() do {\ 00093 __schedule_barrier();\ 00094 __isb(0xF);\ 00095 __schedule_barrier();\ 00096 } while (0) 00097 00098 /** \brief Data Synchronization Barrier 00099 00100 This function acts as a special kind of Data Memory Barrier. 
00101 It completes when all explicit memory accesses before this instruction complete. 00102 */ 00103 #define __DSB() do {\ 00104 __schedule_barrier();\ 00105 __dsb(0xF);\ 00106 __schedule_barrier();\ 00107 } while (0) 00108 00109 /** \brief Data Memory Barrier 00110 00111 This function ensures the apparent order of the explicit memory operations before 00112 and after the instruction, without ensuring their completion. 00113 */ 00114 #define __DMB() do {\ 00115 __schedule_barrier();\ 00116 __dmb(0xF);\ 00117 __schedule_barrier();\ 00118 } while (0) 00119 00120 /** \brief Reverse byte order (32 bit) 00121 00122 This function reverses the byte order in integer value. 00123 00124 \param [in] value Value to reverse 00125 \return Reversed value 00126 */ 00127 #define __REV __rev 00128 00129 00130 /** \brief Reverse byte order (16 bit) 00131 00132 This function reverses the byte order in two unsigned short values. 00133 00134 \param [in] value Value to reverse 00135 \return Reversed value 00136 */ 00137 #ifndef __NO_EMBEDDED_ASM 00138 __attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value) 00139 { 00140 rev16 r0, r0 00141 bx lr 00142 } 00143 #endif 00144 00145 /** \brief Reverse byte order in signed short value 00146 00147 This function reverses the byte order in a signed short value with sign extension to integer. 00148 00149 \param [in] value Value to reverse 00150 \return Reversed value 00151 */ 00152 #ifndef __NO_EMBEDDED_ASM 00153 __attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value) 00154 { 00155 revsh r0, r0 00156 bx lr 00157 } 00158 #endif 00159 00160 00161 /** \brief Rotate Right in unsigned value (32 bit) 00162 00163 This function Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits. 
00164 00165 \param [in] value Value to rotate 00166 \param [in] value Number of Bits to rotate 00167 \return Rotated value 00168 */ 00169 #define __ROR __ror 00170 00171 00172 /** \brief Breakpoint 00173 00174 This function causes the processor to enter Debug state. 00175 Debug tools can use this to investigate system state when the instruction at a particular address is reached. 00176 00177 \param [in] value is ignored by the processor. 00178 If required, a debugger can use it to store additional information about the breakpoint. 00179 */ 00180 #define __BKPT(value) __breakpoint(value) 00181 00182 00183 /** \brief Reverse bit order of value 00184 00185 This function reverses the bit order of the given value. 00186 00187 \param [in] value Value to reverse 00188 \return Reversed value 00189 */ 00190 #if (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300) 00191 #define __RBIT __rbit 00192 #else 00193 __attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value) 00194 { 00195 uint32_t result; 00196 int32_t s = 4 /*sizeof(v)*/ * 8 - 1; // extra shift needed at end 00197 00198 result = value; // r will be reversed bits of v; first get LSB of v 00199 for (value >>= 1; value; value >>= 1) 00200 { 00201 result <<= 1; 00202 result |= value & 1; 00203 s--; 00204 } 00205 result <<= s; // shift when v's highest bits are zero 00206 return(result); 00207 } 00208 #endif 00209 00210 00211 /** \brief Count leading zeros 00212 00213 This function counts the number of leading zeros of a data value. 00214 00215 \param [in] value Value to count the leading zeros 00216 \return number of leading zeros in value 00217 */ 00218 #define __CLZ __clz 00219 00220 00221 #if (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300) 00222 00223 /** \brief LDR Exclusive (8 bit) 00224 00225 This function executes a exclusive LDR instruction for 8 bit value. 
00226 00227 \param [in] ptr Pointer to data 00228 \return value of type uint8_t at (*ptr) 00229 */ 00230 #define __LDREXB(ptr) ((uint8_t ) __ldrex(ptr)) 00231 00232 00233 /** \brief LDR Exclusive (16 bit) 00234 00235 This function executes a exclusive LDR instruction for 16 bit values. 00236 00237 \param [in] ptr Pointer to data 00238 \return value of type uint16_t at (*ptr) 00239 */ 00240 #define __LDREXH(ptr) ((uint16_t) __ldrex(ptr)) 00241 00242 00243 /** \brief LDR Exclusive (32 bit) 00244 00245 This function executes a exclusive LDR instruction for 32 bit values. 00246 00247 \param [in] ptr Pointer to data 00248 \return value of type uint32_t at (*ptr) 00249 */ 00250 #define __LDREXW(ptr) ((uint32_t ) __ldrex(ptr)) 00251 00252 00253 /** \brief STR Exclusive (8 bit) 00254 00255 This function executes a exclusive STR instruction for 8 bit values. 00256 00257 \param [in] value Value to store 00258 \param [in] ptr Pointer to location 00259 \return 0 Function succeeded 00260 \return 1 Function failed 00261 */ 00262 #define __STREXB(value, ptr) __strex(value, ptr) 00263 00264 00265 /** \brief STR Exclusive (16 bit) 00266 00267 This function executes a exclusive STR instruction for 16 bit values. 00268 00269 \param [in] value Value to store 00270 \param [in] ptr Pointer to location 00271 \return 0 Function succeeded 00272 \return 1 Function failed 00273 */ 00274 #define __STREXH(value, ptr) __strex(value, ptr) 00275 00276 00277 /** \brief STR Exclusive (32 bit) 00278 00279 This function executes a exclusive STR instruction for 32 bit values. 00280 00281 \param [in] value Value to store 00282 \param [in] ptr Pointer to location 00283 \return 0 Function succeeded 00284 \return 1 Function failed 00285 */ 00286 #define __STREXW(value, ptr) __strex(value, ptr) 00287 00288 00289 /** \brief Remove the exclusive lock 00290 00291 This function removes the exclusive lock which is created by LDREX. 
00292 00293 */ 00294 #define __CLREX __clrex 00295 00296 00297 /** \brief Signed Saturate 00298 00299 This function saturates a signed value. 00300 00301 \param [in] value Value to be saturated 00302 \param [in] sat Bit position to saturate to (1..32) 00303 \return Saturated value 00304 */ 00305 #define __SSAT __ssat 00306 00307 00308 /** \brief Unsigned Saturate 00309 00310 This function saturates an unsigned value. 00311 00312 \param [in] value Value to be saturated 00313 \param [in] sat Bit position to saturate to (0..31) 00314 \return Saturated value 00315 */ 00316 #define __USAT __usat 00317 00318 00319 /** \brief Rotate Right with Extend (32 bit) 00320 00321 This function moves each bit of a bitstring right by one bit. 00322 The carry input is shifted in at the left end of the bitstring. 00323 00324 \param [in] value Value to rotate 00325 \return Rotated value 00326 */ 00327 #ifndef __NO_EMBEDDED_ASM 00328 __attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint32_t value) 00329 { 00330 rrx r0, r0 00331 bx lr 00332 } 00333 #endif 00334 00335 00336 /** \brief LDRT Unprivileged (8 bit) 00337 00338 This function executes a Unprivileged LDRT instruction for 8 bit value. 00339 00340 \param [in] ptr Pointer to data 00341 \return value of type uint8_t at (*ptr) 00342 */ 00343 #define __LDRBT(ptr) ((uint8_t ) __ldrt(ptr)) 00344 00345 00346 /** \brief LDRT Unprivileged (16 bit) 00347 00348 This function executes a Unprivileged LDRT instruction for 16 bit values. 00349 00350 \param [in] ptr Pointer to data 00351 \return value of type uint16_t at (*ptr) 00352 */ 00353 #define __LDRHT(ptr) ((uint16_t) __ldrt(ptr)) 00354 00355 00356 /** \brief LDRT Unprivileged (32 bit) 00357 00358 This function executes a Unprivileged LDRT instruction for 32 bit values. 
00359 00360 \param [in] ptr Pointer to data 00361 \return value of type uint32_t at (*ptr) 00362 */ 00363 #define __LDRT(ptr) ((uint32_t ) __ldrt(ptr)) 00364 00365 00366 /** \brief STRT Unprivileged (8 bit) 00367 00368 This function executes a Unprivileged STRT instruction for 8 bit values. 00369 00370 \param [in] value Value to store 00371 \param [in] ptr Pointer to location 00372 */ 00373 #define __STRBT(value, ptr) __strt(value, ptr) 00374 00375 00376 /** \brief STRT Unprivileged (16 bit) 00377 00378 This function executes a Unprivileged STRT instruction for 16 bit values. 00379 00380 \param [in] value Value to store 00381 \param [in] ptr Pointer to location 00382 */ 00383 #define __STRHT(value, ptr) __strt(value, ptr) 00384 00385 00386 /** \brief STRT Unprivileged (32 bit) 00387 00388 This function executes a Unprivileged STRT instruction for 32 bit values. 00389 00390 \param [in] value Value to store 00391 \param [in] ptr Pointer to location 00392 */ 00393 #define __STRT(value, ptr) __strt(value, ptr) 00394 00395 #endif /* (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300) */ 00396 00397 00398 #elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/ 00399 /* GNU gcc specific functions */ 00400 00401 /* Define macros for porting to both thumb1 and thumb2. 00402 * For thumb1, use low register (r0-r7), specified by constrant "l" 00403 * Otherwise, use general registers, specified by constrant "r" */ 00404 #if defined (__thumb__) && !defined (__thumb2__) 00405 #define __CMSIS_GCC_OUT_REG(r) "=l" (r) 00406 #define __CMSIS_GCC_USE_REG(r) "l" (r) 00407 #else 00408 #define __CMSIS_GCC_OUT_REG(r) "=r" (r) 00409 #define __CMSIS_GCC_USE_REG(r) "r" (r) 00410 #endif 00411 00412 /** \brief No Operation 00413 00414 No Operation does nothing. This instruction can be used for code alignment purposes. 
00415 */ 00416 __attribute__((always_inline)) __STATIC_INLINE void __NOP(void) 00417 { 00418 __ASM volatile ("nop"); 00419 } 00420 00421 00422 /** \brief Wait For Interrupt 00423 00424 Wait For Interrupt is a hint instruction that suspends execution 00425 until one of a number of events occurs. 00426 */ 00427 __attribute__((always_inline)) __STATIC_INLINE void __WFI(void) 00428 { 00429 __ASM volatile ("wfi"); 00430 } 00431 00432 00433 /** \brief Wait For Event 00434 00435 Wait For Event is a hint instruction that permits the processor to enter 00436 a low-power state until one of a number of events occurs. 00437 */ 00438 __attribute__((always_inline)) __STATIC_INLINE void __WFE(void) 00439 { 00440 __ASM volatile ("wfe"); 00441 } 00442 00443 00444 /** \brief Send Event 00445 00446 Send Event is a hint instruction. It causes an event to be signaled to the CPU. 00447 */ 00448 __attribute__((always_inline)) __STATIC_INLINE void __SEV(void) 00449 { 00450 __ASM volatile ("sev"); 00451 } 00452 00453 00454 /** \brief Instruction Synchronization Barrier 00455 00456 Instruction Synchronization Barrier flushes the pipeline in the processor, 00457 so that all instructions following the ISB are fetched from cache or 00458 memory, after the instruction has been completed. 00459 */ 00460 __attribute__((always_inline)) __STATIC_INLINE void __ISB(void) 00461 { 00462 __ASM volatile ("isb 0xF":::"memory"); 00463 } 00464 00465 00466 /** \brief Data Synchronization Barrier 00467 00468 This function acts as a special kind of Data Memory Barrier. 00469 It completes when all explicit memory accesses before this instruction complete. 00470 */ 00471 __attribute__((always_inline)) __STATIC_INLINE void __DSB(void) 00472 { 00473 __ASM volatile ("dsb 0xF":::"memory"); 00474 } 00475 00476 00477 /** \brief Data Memory Barrier 00478 00479 This function ensures the apparent order of the explicit memory operations before 00480 and after the instruction, without ensuring their completion. 
00481 */ 00482 __attribute__((always_inline)) __STATIC_INLINE void __DMB(void) 00483 { 00484 __ASM volatile ("dmb 0xF":::"memory"); 00485 } 00486 00487 00488 /** \brief Reverse byte order (32 bit) 00489 00490 This function reverses the byte order in integer value. 00491 00492 \param [in] value Value to reverse 00493 \return Reversed value 00494 */ 00495 __attribute__((always_inline)) __STATIC_INLINE uint32_t __REV(uint32_t value) 00496 { 00497 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5) 00498 return __builtin_bswap32(value); 00499 #else 00500 uint32_t result; 00501 00502 __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) ); 00503 return(result); 00504 #endif 00505 } 00506 00507 00508 /** \brief Reverse byte order (16 bit) 00509 00510 This function reverses the byte order in two unsigned short values. 00511 00512 \param [in] value Value to reverse 00513 \return Reversed value 00514 */ 00515 __attribute__((always_inline)) __STATIC_INLINE uint32_t __REV16(uint32_t value) 00516 { 00517 uint32_t result; 00518 00519 __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) ); 00520 return(result); 00521 } 00522 00523 00524 /** \brief Reverse byte order in signed short value 00525 00526 This function reverses the byte order in a signed short value with sign extension to integer. 
00527 00528 \param [in] value Value to reverse 00529 \return Reversed value 00530 */ 00531 __attribute__((always_inline)) __STATIC_INLINE int32_t __REVSH(int32_t value) 00532 { 00533 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) 00534 return (short)__builtin_bswap16(value); 00535 #else 00536 uint32_t result; 00537 00538 __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) ); 00539 return(result); 00540 #endif 00541 } 00542 00543 00544 /** \brief Rotate Right in unsigned value (32 bit) 00545 00546 This function Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits. 00547 00548 \param [in] value Value to rotate 00549 \param [in] value Number of Bits to rotate 00550 \return Rotated value 00551 */ 00552 __attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2) 00553 { 00554 return (op1 >> op2) | (op1 << (32 - op2)); 00555 } 00556 00557 00558 /** \brief Breakpoint 00559 00560 This function causes the processor to enter Debug state. 00561 Debug tools can use this to investigate system state when the instruction at a particular address is reached. 00562 00563 \param [in] value is ignored by the processor. 00564 If required, a debugger can use it to store additional information about the breakpoint. 00565 */ 00566 #define __BKPT(value) __ASM volatile ("bkpt "#value) 00567 00568 00569 /** \brief Reverse bit order of value 00570 00571 This function reverses the bit order of the given value. 
00572 00573 \param [in] value Value to reverse 00574 \return Reversed value 00575 */ 00576 __attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value) 00577 { 00578 uint32_t result; 00579 00580 #if (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300) 00581 __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) ); 00582 #else 00583 int32_t s = 4 /*sizeof(v)*/ * 8 - 1; // extra shift needed at end 00584 00585 result = value; // r will be reversed bits of v; first get LSB of v 00586 for (value >>= 1; value; value >>= 1) 00587 { 00588 result <<= 1; 00589 result |= value & 1; 00590 s--; 00591 } 00592 result <<= s; // shift when v's highest bits are zero 00593 #endif 00594 return(result); 00595 } 00596 00597 00598 /** \brief Count leading zeros 00599 00600 This function counts the number of leading zeros of a data value. 00601 00602 \param [in] value Value to count the leading zeros 00603 \return number of leading zeros in value 00604 */ 00605 #define __CLZ __builtin_clz 00606 00607 00608 #if (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300) 00609 00610 /** \brief LDR Exclusive (8 bit) 00611 00612 This function executes a exclusive LDR instruction for 8 bit value. 00613 00614 \param [in] ptr Pointer to data 00615 \return value of type uint8_t at (*ptr) 00616 */ 00617 __attribute__((always_inline)) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr) 00618 { 00619 uint32_t result; 00620 00621 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) 00622 __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) ); 00623 #else 00624 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not 00625 accepted by assembler. So has to use following less efficient pattern. 
00626 */ 00627 __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" ); 00628 #endif 00629 return ((uint8_t) result); /* Add explicit type cast here */ 00630 } 00631 00632 00633 /** \brief LDR Exclusive (16 bit) 00634 00635 This function executes a exclusive LDR instruction for 16 bit values. 00636 00637 \param [in] ptr Pointer to data 00638 \return value of type uint16_t at (*ptr) 00639 */ 00640 __attribute__((always_inline)) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr) 00641 { 00642 uint32_t result; 00643 00644 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) 00645 __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) ); 00646 #else 00647 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not 00648 accepted by assembler. So has to use following less efficient pattern. 00649 */ 00650 __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" ); 00651 #endif 00652 return ((uint16_t) result); /* Add explicit type cast here */ 00653 } 00654 00655 00656 /** \brief LDR Exclusive (32 bit) 00657 00658 This function executes a exclusive LDR instruction for 32 bit values. 00659 00660 \param [in] ptr Pointer to data 00661 \return value of type uint32_t at (*ptr) 00662 */ 00663 __attribute__((always_inline)) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr) 00664 { 00665 uint32_t result; 00666 00667 __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) ); 00668 return(result); 00669 } 00670 00671 00672 /** \brief STR Exclusive (8 bit) 00673 00674 This function executes a exclusive STR instruction for 8 bit values. 
00675 00676 \param [in] value Value to store 00677 \param [in] ptr Pointer to location 00678 \return 0 Function succeeded 00679 \return 1 Function failed 00680 */ 00681 __attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr) 00682 { 00683 uint32_t result; 00684 00685 __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) ); 00686 return(result); 00687 } 00688 00689 00690 /** \brief STR Exclusive (16 bit) 00691 00692 This function executes a exclusive STR instruction for 16 bit values. 00693 00694 \param [in] value Value to store 00695 \param [in] ptr Pointer to location 00696 \return 0 Function succeeded 00697 \return 1 Function failed 00698 */ 00699 __attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr) 00700 { 00701 uint32_t result; 00702 00703 __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) ); 00704 return(result); 00705 } 00706 00707 00708 /** \brief STR Exclusive (32 bit) 00709 00710 This function executes a exclusive STR instruction for 32 bit values. 00711 00712 \param [in] value Value to store 00713 \param [in] ptr Pointer to location 00714 \return 0 Function succeeded 00715 \return 1 Function failed 00716 */ 00717 __attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr) 00718 { 00719 uint32_t result; 00720 00721 __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) ); 00722 return(result); 00723 } 00724 00725 00726 /** \brief Remove the exclusive lock 00727 00728 This function removes the exclusive lock which is created by LDREX. 00729 00730 */ 00731 __attribute__((always_inline)) __STATIC_INLINE void __CLREX(void) 00732 { 00733 __ASM volatile ("clrex" ::: "memory"); 00734 } 00735 00736 00737 /** \brief Signed Saturate 00738 00739 This function saturates a signed value. 
00740 00741 \param [in] value Value to be saturated 00742 \param [in] sat Bit position to saturate to (1..32) 00743 \return Saturated value 00744 */ 00745 #define __SSAT(ARG1,ARG2) \ 00746 ({ \ 00747 uint32_t __RES, __ARG1 = (ARG1); \ 00748 __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \ 00749 __RES; \ 00750 }) 00751 00752 00753 /** \brief Unsigned Saturate 00754 00755 This function saturates an unsigned value. 00756 00757 \param [in] value Value to be saturated 00758 \param [in] sat Bit position to saturate to (0..31) 00759 \return Saturated value 00760 */ 00761 #define __USAT(ARG1,ARG2) \ 00762 ({ \ 00763 uint32_t __RES, __ARG1 = (ARG1); \ 00764 __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \ 00765 __RES; \ 00766 }) 00767 00768 00769 /** \brief Rotate Right with Extend (32 bit) 00770 00771 This function moves each bit of a bitstring right by one bit. 00772 The carry input is shifted in at the left end of the bitstring. 00773 00774 \param [in] value Value to rotate 00775 \return Rotated value 00776 */ 00777 __attribute__((always_inline)) __STATIC_INLINE uint32_t __RRX(uint32_t value) 00778 { 00779 uint32_t result; 00780 00781 __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) ); 00782 return(result); 00783 } 00784 00785 00786 /** \brief LDRT Unprivileged (8 bit) 00787 00788 This function executes a Unprivileged LDRT instruction for 8 bit value. 00789 00790 \param [in] ptr Pointer to data 00791 \return value of type uint8_t at (*ptr) 00792 */ 00793 __attribute__((always_inline)) __STATIC_INLINE uint8_t __LDRBT(volatile uint8_t *addr) 00794 { 00795 uint32_t result; 00796 00797 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) 00798 __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*addr) ); 00799 #else 00800 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not 00801 accepted by assembler. So has to use following less efficient pattern. 
00802 */ 00803 __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (addr) : "memory" ); 00804 #endif 00805 return ((uint8_t) result); /* Add explicit type cast here */ 00806 } 00807 00808 00809 /** \brief LDRT Unprivileged (16 bit) 00810 00811 This function executes a Unprivileged LDRT instruction for 16 bit values. 00812 00813 \param [in] ptr Pointer to data 00814 \return value of type uint16_t at (*ptr) 00815 */ 00816 __attribute__((always_inline)) __STATIC_INLINE uint16_t __LDRHT(volatile uint16_t *addr) 00817 { 00818 uint32_t result; 00819 00820 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) 00821 __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*addr) ); 00822 #else 00823 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not 00824 accepted by assembler. So has to use following less efficient pattern. 00825 */ 00826 __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (addr) : "memory" ); 00827 #endif 00828 return ((uint16_t) result); /* Add explicit type cast here */ 00829 } 00830 00831 00832 /** \brief LDRT Unprivileged (32 bit) 00833 00834 This function executes a Unprivileged LDRT instruction for 32 bit values. 00835 00836 \param [in] ptr Pointer to data 00837 \return value of type uint32_t at (*ptr) 00838 */ 00839 __attribute__((always_inline)) __STATIC_INLINE uint32_t __LDRT(volatile uint32_t *addr) 00840 { 00841 uint32_t result; 00842 00843 __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*addr) ); 00844 return(result); 00845 } 00846 00847 00848 /** \brief STRT Unprivileged (8 bit) 00849 00850 This function executes a Unprivileged STRT instruction for 8 bit values. 
00851 00852 \param [in] value Value to store 00853 \param [in] ptr Pointer to location 00854 */ 00855 __attribute__((always_inline)) __STATIC_INLINE void __STRBT(uint8_t value, volatile uint8_t *addr) 00856 { 00857 __ASM volatile ("strbt %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) ); 00858 } 00859 00860 00861 /** \brief STRT Unprivileged (16 bit) 00862 00863 This function executes a Unprivileged STRT instruction for 16 bit values. 00864 00865 \param [in] value Value to store 00866 \param [in] ptr Pointer to location 00867 */ 00868 __attribute__((always_inline)) __STATIC_INLINE void __STRHT(uint16_t value, volatile uint16_t *addr) 00869 { 00870 __ASM volatile ("strht %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) ); 00871 } 00872 00873 00874 /** \brief STRT Unprivileged (32 bit) 00875 00876 This function executes a Unprivileged STRT instruction for 32 bit values. 00877 00878 \param [in] value Value to store 00879 \param [in] ptr Pointer to location 00880 */ 00881 __attribute__((always_inline)) __STATIC_INLINE void __STRT(uint32_t value, volatile uint32_t *addr) 00882 { 00883 __ASM volatile ("strt %1, %0" : "=Q" (*addr) : "r" (value) ); 00884 } 00885 00886 #endif /* (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300) */ 00887 00888 00889 #elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/ 00890 /* IAR iccarm specific functions */ 00891 #include <cmsis_iar.h> 00892 00893 00894 #elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/ 00895 /* TI CCS specific functions */ 00896 #include <cmsis_ccs.h> 00897 00898 00899 #elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/ 00900 /* TASKING carm specific functions */ 00901 /* 00902 * The CMSIS functions have been implemented as intrinsics in the compiler. 00903 * Please use "carm -?i" to get an up to date list of all intrinsics, 00904 * Including the CMSIS ones. 
00905 */ 00906 00907 00908 #elif defined ( __CSMC__ ) /*------------------ COSMIC Compiler -------------------*/ 00909 /* Cosmic specific functions */ 00910 #include <cmsis_csm.h> 00911 00912 #endif 00913 00914 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */ 00915 00916 #endif /* __CORE_CMINSTR_H */
Generated on Tue Jul 12 2022 20:02:09 by Doxygen 1.7.2