Diff: LPC1768/core_cmInstr.h
Revision: 46:890817bdcffb
Parent: 40:976df7c37ad5
--- a/LPC1768/core_cmInstr.h Thu Nov 22 16:04:31 2012 +0000
+++ b/LPC1768/core_cmInstr.h Mon Nov 26 10:13:56 2012 +0000
@@ -1,16 +1,16 @@
/**************************************************************************//**
* @file core_cmInstr.h
* @brief CMSIS Cortex-M Core Instruction Access Header File
- * @version V3.00
- * @date 09. December 2011
+ * @version V3.03
+ * @date 29. August 2012
*
* @note
- * Copyright (C) 2009-2011 ARM Limited. All rights reserved.
+ * Copyright (C) 2009-2012 ARM Limited. All rights reserved.
*
* @par
- * ARM Limited (ARM) is supplying this software for use with Cortex-M
- * processor based microcontrollers. This file can be freely distributed
- * within development tools that are supporting such ARM based processors.
+ * ARM Limited (ARM) is supplying this software for use with Cortex-M
+ * processor based microcontrollers. This file can be freely distributed
+ * within development tools that are supporting such ARM based processors.
*
* @par
* THIS SOFTWARE IS PROVIDED "AS IS". NO WARRANTIES, WHETHER EXPRESS, IMPLIED
@@ -71,8 +71,8 @@
/** \brief Instruction Synchronization Barrier
- Instruction Synchronization Barrier flushes the pipeline in the processor,
- so that all instructions following the ISB are fetched from cache or
+ Instruction Synchronization Barrier flushes the pipeline in the processor,
+ so that all instructions following the ISB are fetched from cache or
memory, after the instruction has been completed.
*/
#define __ISB() __isb(0xF)
@@ -80,7 +80,7 @@
/** \brief Data Synchronization Barrier
- This function acts as a special kind of Data Memory Barrier.
+ This function acts as a special kind of Data Memory Barrier.
It completes when all explicit memory accesses before this instruction complete.
*/
#define __DSB() __dsb(0xF)
@@ -88,7 +88,7 @@
/** \brief Data Memory Barrier
- This function ensures the apparent order of the explicit memory operations before
+ This function ensures the apparent order of the explicit memory operations before
and after the instruction, without ensuring their completion.
*/
#define __DMB() __dmb(0xF)
@@ -111,12 +111,13 @@
\param [in] value Value to reverse
\return Reversed value
*/
-static __attribute__((section(".rev16_text"))) __INLINE __ASM uint32_t __REV16(uint32_t value)
+#ifndef __NO_EMBEDDED_ASM
+__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
rev16 r0, r0
bx lr
}
-
+#endif
/** \brief Reverse byte order in signed short value
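The new __NO_EMBEDDED_ASM guard lets a project suppress these embedded-assembly helpers, for example when the toolchain already supplies equivalent intrinsics. A minimal sketch of how that option might be used; the macro name comes from the diff, while the device header and project setup are assumptions:

/* Hypothetical project configuration: define __NO_EMBEDDED_ASM before the
   CMSIS core headers are included so that the embedded-assembly __REV16 and
   __REVSH above are compiled out (assumes replacements exist elsewhere). */
#define __NO_EMBEDDED_ASM
#include "LPC17xx.h"   /* device header that pulls in core_cm3.h / core_cmInstr.h */
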
@@ -125,11 +126,35 @@
\param [in] value Value to reverse
\return Reversed value
*/
-static __attribute__((section(".revsh_text"))) __INLINE __ASM int32_t __REVSH(int32_t value)
+#ifndef __NO_EMBEDDED_ASM
+__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
{
revsh r0, r0
bx lr
}
+#endif
+
+
+/** \brief Rotate Right in unsigned value (32 bit)
+
+ This function Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
+
+ \param [in] value Value to rotate
+ \param [in] value Number of Bits to rotate
+ \return Rotated value
+ */
+#define __ROR __ror
+
+
+/** \brief Breakpoint
+
+ This function causes the processor to enter Debug state.
+ Debug tools can use this to investigate system state when the instruction at a particular address is reached.
+
+ \param [in] value is ignored by the processor.
+ If required, a debugger can use it to store additional information about the breakpoint.
+ */
+#define __BKPT(value) __breakpoint(value)
#if (__CORTEX_M >= 0x03)
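The ARM/Keil branch gains __ROR and __BKPT, mapped onto the compiler's built-in __ror and __breakpoint intrinsics. A minimal usage sketch; the variable names and the check are illustrative only, not part of this header:

/* Illustrative only: rotate a bit mask and plant a breakpoint on error. */
uint32_t mask    = 0x000000F0U;
uint32_t rotated = __ROR(mask, 8);     /* rotate right by 8 bits -> 0xF0000000 */

if (rotated != 0xF0000000U) {
    __BKPT(0);                         /* halt under a debugger if the check fails */
}
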
@@ -247,7 +272,7 @@
\param [in] value Value to count the leading zeros
\return number of leading zeros in value
*/
-#define __CLZ __clz
+#define __CLZ __clz
#endif /* (__CORTEX_M >= 0x03) */
@@ -259,6 +284,12 @@
#include <cmsis_iar.h>
+#elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
+/* TI CCS specific functions */
+
+#include <cmsis_ccs.h>
+
+
#elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */
@@ -266,7 +297,7 @@
No Operation does nothing. This instruction can be used for code alignment purposes.
*/
-__attribute__( ( always_inline ) ) static __INLINE void __NOP(void)
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __NOP(void)
{
__ASM volatile ("nop");
}
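Throughout the GCC branch, static __INLINE is replaced by the single __STATIC_INLINE macro. That macro is not defined in this file; in CMSIS releases of this vintage it is expected to come from the companion core header (for example core_cm3.h), roughly as sketched below. The exact definitions are an assumption and are not part of this diff:

/* Assumed definitions from the companion core header (not shown in this diff). */
#if defined ( __CC_ARM )
  #define __STATIC_INLINE  static __inline
#elif defined ( __GNUC__ )
  #define __STATIC_INLINE  static inline
#endif
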
@@ -277,7 +308,7 @@
Wait For Interrupt is a hint instruction that suspends execution
until one of a number of events occurs.
*/
-__attribute__( ( always_inline ) ) static __INLINE void __WFI(void)
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFI(void)
{
__ASM volatile ("wfi");
}
@@ -288,7 +319,7 @@
Wait For Event is a hint instruction that permits the processor to enter
a low-power state until one of a number of events occurs.
*/
-__attribute__( ( always_inline ) ) static __INLINE void __WFE(void)
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFE(void)
{
__ASM volatile ("wfe");
}
@@ -298,7 +329,7 @@
Send Event is a hint instruction. It causes an event to be signaled to the CPU.
*/
-__attribute__( ( always_inline ) ) static __INLINE void __SEV(void)
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __SEV(void)
{
__ASM volatile ("sev");
}
@@ -306,11 +337,11 @@
/** \brief Instruction Synchronization Barrier
- Instruction Synchronization Barrier flushes the pipeline in the processor,
- so that all instructions following the ISB are fetched from cache or
+ Instruction Synchronization Barrier flushes the pipeline in the processor,
+ so that all instructions following the ISB are fetched from cache or
memory, after the instruction has been completed.
*/
-__attribute__( ( always_inline ) ) static __INLINE void __ISB(void)
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __ISB(void)
{
__ASM volatile ("isb");
}
@@ -318,10 +349,10 @@
/** \brief Data Synchronization Barrier
- This function acts as a special kind of Data Memory Barrier.
+ This function acts as a special kind of Data Memory Barrier.
It completes when all explicit memory accesses before this instruction complete.
*/
-__attribute__( ( always_inline ) ) static __INLINE void __DSB(void)
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __DSB(void)
{
__ASM volatile ("dsb");
}
@@ -329,10 +360,10 @@
/** \brief Data Memory Barrier
- This function ensures the apparent order of the explicit memory operations before
+ This function ensures the apparent order of the explicit memory operations before
and after the instruction, without ensuring their completion.
*/
-__attribute__( ( always_inline ) ) static __INLINE void __DMB(void)
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
{
__ASM volatile ("dmb");
}
@@ -345,10 +376,10 @@
\param [in] value Value to reverse
\return Reversed value
*/
-__attribute__( ( always_inline ) ) static __INLINE uint32_t __REV(uint32_t value)
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
{
uint32_t result;
-
+
__ASM volatile ("rev %0, %1" : "=r" (result) : "r" (value) );
return(result);
}
@@ -361,10 +392,10 @@
\param [in] value Value to reverse
\return Reversed value
*/
-__attribute__( ( always_inline ) ) static __INLINE uint32_t __REV16(uint32_t value)
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t value)
{
uint32_t result;
-
+
__ASM volatile ("rev16 %0, %1" : "=r" (result) : "r" (value) );
return(result);
}
@@ -377,15 +408,42 @@
\param [in] value Value to reverse
\return Reversed value
*/
-__attribute__( ( always_inline ) ) static __INLINE int32_t __REVSH(int32_t value)
+__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
{
uint32_t result;
-
+
__ASM volatile ("revsh %0, %1" : "=r" (result) : "r" (value) );
return(result);
}
+/** \brief Rotate Right in unsigned value (32 bit)
+
+ This function Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
+
+ \param [in] value Value to rotate
+ \param [in] value Number of Bits to rotate
+ \return Rotated value
+ */
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
+{
+
+ __ASM volatile ("ror %0, %0, %1" : "+r" (op1) : "r" (op2) );
+ return(op1);
+}
+
+
+/** \brief Breakpoint
+
+ This function causes the processor to enter Debug state.
+ Debug tools can use this to investigate system state when the instruction at a particular address is reached.
+
+ \param [in] value is ignored by the processor.
+ If required, a debugger can use it to store additional information about the breakpoint.
+ */
+#define __BKPT(value) __ASM volatile ("bkpt "#value)
+
+
#if (__CORTEX_M >= 0x03)
/** \brief Reverse bit order of value
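For GCC, __ROR is implemented with a single inline-assembly ROR instruction and __BKPT by stringizing its argument into the instruction, so the argument must be a compile-time literal. A short illustrative sketch of how the new definitions behave:

/* Illustrative expansion: __BKPT(2) becomes the statement below, so the
   argument must be a literal immediate, not a runtime variable. */
__ASM volatile ("bkpt 2");

/* __ROR rotates right by 0..31 bits: */
uint32_t x = __ROR(0x12345678U, 16);   /* yields 0x56781234 */
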
@@ -395,10 +453,10 @@
\param [in] value Value to reverse
\return Reversed value
*/
-__attribute__( ( always_inline ) ) static __INLINE uint32_t __RBIT(uint32_t value)
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
uint32_t result;
-
+
__ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
return(result);
}
@@ -411,10 +469,10 @@
\param [in] ptr Pointer to data
\return value of type uint8_t at (*ptr)
*/
-__attribute__( ( always_inline ) ) static __INLINE uint8_t __LDREXB(volatile uint8_t *addr)
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
uint8_t result;
-
+
__ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) );
return(result);
}
@@ -427,10 +485,10 @@
\param [in] ptr Pointer to data
\return value of type uint16_t at (*ptr)
*/
-__attribute__( ( always_inline ) ) static __INLINE uint16_t __LDREXH(volatile uint16_t *addr)
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
uint16_t result;
-
+
__ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) );
return(result);
}
@@ -443,10 +501,10 @@
\param [in] ptr Pointer to data
\return value of type uint32_t at (*ptr)
*/
-__attribute__( ( always_inline ) ) static __INLINE uint32_t __LDREXW(volatile uint32_t *addr)
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
uint32_t result;
-
+
__ASM volatile ("ldrex %0, [%1]" : "=r" (result) : "r" (addr) );
return(result);
}
@@ -461,10 +519,10 @@
\return 0 Function succeeded
\return 1 Function failed
*/
-__attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
uint32_t result;
-
+
__ASM volatile ("strexb %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
return(result);
}
@@ -479,10 +537,10 @@
\return 0 Function succeeded
\return 1 Function failed
*/
-__attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
uint32_t result;
-
+
__ASM volatile ("strexh %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
return(result);
}
@@ -497,10 +555,10 @@
\return 0 Function succeeded
\return 1 Function failed
*/
-__attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
uint32_t result;
-
+
__ASM volatile ("strex %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
return(result);
}
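The exclusive-access pair LDREX/STREX is typically used in a retry loop to build lock-free read-modify-write operations. A minimal sketch; the helper function and counter names are illustrative and not part of CMSIS:

/* Illustrative helper: atomically increment a counter using the
   exclusive-access intrinsics defined above. __STREXW returns 0 on success,
   1 if the exclusive monitor was lost and the loop must retry. */
static uint32_t atomic_increment(volatile uint32_t *counter)
{
    uint32_t value;
    do {
        value = __LDREXW(counter) + 1U;
    } while (__STREXW(value, counter) != 0U);
    return value;
}
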
@@ -511,7 +569,7 @@
This function removes the exclusive lock which is created by LDREX.
*/
-__attribute__( ( always_inline ) ) static __INLINE void __CLREX(void)
+__attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
{
__ASM volatile ("clrex");
}
@@ -556,10 +614,10 @@
\param [in] value Value to count the leading zeros
\return number of leading zeros in value
*/
-__attribute__( ( always_inline ) ) static __INLINE uint8_t __CLZ(uint32_t value)
+__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
{
uint8_t result;
-
+
__ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
return(result);
}
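__CLZ counts leading zeros, which makes it a convenient building block for finding the highest set bit or an integer log2. A small sketch; the helper name is illustrative:

/* Illustrative helper: index of the most significant set bit (0..31).
   __CLZ(0) returns 32, so callers must treat a zero input as a special case. */
static uint32_t highest_set_bit(uint32_t value)
{
    return 31U - (uint32_t)__CLZ(value);   /* valid only for value != 0 */
}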
