fork

Dependencies: mbed

Fork of LG by igor Apu

Revision: 1:f2adcae3d304
Parent: 0:8ad47e2b6f00
--- a/core_cmInstr.h	Sat Jan 30 13:00:39 2016 +0000
+++ b/core_cmInstr.h	Sat Jan 30 13:53:19 2016 +0000
@@ -8,9 +8,9 @@
  * Copyright (C) 2009-2010 ARM Limited. All rights reserved.
  *
  * @par
- * ARM Limited (ARM) is supplying this software for use with Cortex-M 
- * processor based microcontrollers.  This file can be freely distributed 
- * within development tools that are supporting such ARM based processors. 
+ * ARM Limited (ARM) is supplying this software for use with Cortex-M
+ * processor based microcontrollers.  This file can be freely distributed
+ * within development tools that are supporting such ARM based processors.
  *
  * @par
  * THIS SOFTWARE IS PROVIDED "AS IS".  NO WARRANTIES, WHETHER EXPRESS, IMPLIED
@@ -66,8 +66,8 @@
 
 /** \brief  Instruction Synchronization Barrier
 
-    Instruction Synchronization Barrier flushes the pipeline in the processor, 
-    so that all instructions following the ISB are fetched from cache or 
+    Instruction Synchronization Barrier flushes the pipeline in the processor,
+    so that all instructions following the ISB are fetched from cache or
     memory, after the instruction has been completed.
  */
 #define __ISB()                           __isb(0xF)
@@ -75,7 +75,7 @@
 
 /** \brief  Data Synchronization Barrier
 
-    This function acts as a special kind of Data Memory Barrier. 
+    This function acts as a special kind of Data Memory Barrier.
     It completes when all explicit memory accesses before this instruction complete.
  */
 #define __DSB()                           __dsb(0xF)
@@ -83,7 +83,7 @@
 
 /** \brief  Data Memory Barrier
 
-    This function ensures the apparent order of the explicit memory operations before 
+    This function ensures the apparent order of the explicit memory operations before
     and after the instruction, without ensuring their completion.
  */
 #define __DMB()                           __dmb(0xF)
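For context, these barrier macros map directly onto the ARM DMB/DSB instructions. A minimal usage sketch of the classic publish pattern (the variables `shared_data` and `data_ready` are hypothetical, and the header is assumed to be pulled in via the device's CMSIS includes):

    volatile uint32_t shared_data;
    volatile uint32_t data_ready;

    void publish(uint32_t value)
    {
        shared_data = value;   /* 1. write the payload                        */
        __DMB();               /* 2. order the payload write before the flag  */
        data_ready = 1;        /* 3. flag write now observed after the payload */
        __DSB();               /* 4. optionally wait until both accesses done */
    }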
@@ -111,10 +111,10 @@
 #else  /* (__ARMCC_VERSION >= 400677)  */
 static __INLINE __ASM uint32_t __REV16(uint32_t value)
 {
-  rev16 r0, r0
-  bx lr
+    rev16   r0, r0
+    bx      lr
 }
-#endif /* __ARMCC_VERSION  */ 
+#endif /* __ARMCC_VERSION  */
 
 
 /** \brief  Reverse byte order in signed short value
@@ -129,10 +129,10 @@
 #else  /* (__ARMCC_VERSION >= 400677)  */
 static __INLINE __ASM int32_t __REVSH(int32_t value)
 {
-  revsh r0, r0
-  bx lr
+    revsh   r0, r0
+    bx      lr
 }
-#endif /* __ARMCC_VERSION  */ 
+#endif /* __ARMCC_VERSION  */
 
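As a quick reference for the byte-reversal intrinsics above, a sketch of the values they produce (illustrative constants, assuming the usual CMSIS `__REV` is also available on the target):

    #include <stdint.h>

    void rev_examples(void)
    {
        uint32_t a = __REV(0x12345678);    /* 0x78563412: whole word reversed    */
        uint32_t b = __REV16(0x12345678);  /* 0x34127856: bytes swapped per half */
        int32_t  c = __REVSH(0x00000080);  /* 0xFFFF8000: low half byte-swapped, */
                                           /* then sign-extended from bit 15     */
        (void)a; (void)b; (void)c;
    }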
 
 #if       (__CORTEX_M >= 0x03)
@@ -222,7 +222,7 @@
 extern void __CLREX(void);
 #else  /* (__ARMCC_VERSION >= 400000)  */
 #define __CLREX                           __clrex
-#endif /* __ARMCC_VERSION  */ 
+#endif /* __ARMCC_VERSION  */
 
 
 /** \brief  Signed Saturate
@@ -254,7 +254,7 @@
     \param [in]  value  Value to count the leading zeros
     \return             number of leading zeros in value
  */
-#define __CLZ                             __clz 
+#define __CLZ                             __clz
 
 #endif /* (__CORTEX_M >= 0x03) */
 
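Since __CLZ counts leading zeros (and yields 32 for an input of 0 on Cortex-M3), a common use is locating the most significant set bit, e.g. an integer log2. A minimal sketch:

    #include <stdint.h>

    /* Index of the most significant set bit (not meaningful for x == 0). */
    static inline uint32_t msb_index(uint32_t x)
    {
        return 31u - __CLZ(x);   /* __CLZ(1) == 31, so msb_index(1) == 0 */
    }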
@@ -281,7 +281,7 @@
  */
 static __INLINE  void __WFI(void)
 {
-  __ASM ("wfi");
+    __ASM ("wfi");
 }
 
 
@@ -292,7 +292,7 @@
  */
 static __INLINE  void __WFE(void)
 {
-  __ASM ("wfe");
+    __ASM ("wfe");
 }
 
 
@@ -302,7 +302,7 @@
  */
 static __INLINE  void __SEV(void)
 {
-  __ASM ("sev");
+    __ASM ("sev");
 }
 
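The hint instructions above typically appear in an idle loop: the core sleeps in WFE until an interrupt or an SEV from another context wakes it. A minimal sketch (assuming `work_pending` is a hypothetical flag set from an interrupt handler):

    volatile uint32_t work_pending;

    void idle_loop(void)
    {
        for (;;) {
            while (!work_pending) {
                __WFE();          /* sleep until an event or interrupt arrives */
            }
            work_pending = 0;
            /* ... handle the work ... */
        }
    }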
 
@@ -323,7 +323,7 @@
  */
 static uint32_t __REV16(uint32_t value)
 {
-  __ASM("rev16 r0, r0");
+    __ASM("rev16 r0, r0");
 }
 
 
@@ -341,7 +341,7 @@
  */
 static uint32_t __RBIT(uint32_t value)
 {
-  __ASM("rbit r0, r0");
+    __ASM("rbit r0, r0");
 }
 
 
@@ -354,7 +354,7 @@
  */
 static uint8_t __LDREXB(volatile uint8_t *addr)
 {
-  __ASM("ldrexb r0, [r0]");
+    __ASM("ldrexb r0, [r0]");
 }
 
 
@@ -367,7 +367,7 @@
  */
 static uint16_t __LDREXH(volatile uint16_t *addr)
 {
-  __ASM("ldrexh r0, [r0]");
+    __ASM("ldrexh r0, [r0]");
 }
 
 
@@ -381,7 +381,7 @@
 /* intrinsic unsigned long __LDREX(unsigned long *)  (see intrinsics.h) */
 static uint32_t __LDREXW(volatile uint32_t *addr)
 {
-  __ASM("ldrex r0, [r0]");
+    __ASM("ldrex r0, [r0]");
 }
 
 
@@ -396,7 +396,7 @@
  */
 static uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
 {
-  __ASM("strexb r0, r0, [r1]");
+    __ASM("strexb r0, r0, [r1]");
 }
 
 
@@ -411,7 +411,7 @@
  */
 static uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
 {
-  __ASM("strexh r0, r0, [r1]");
+    __ASM("strexh r0, r0, [r1]");
 }
 
 
@@ -427,7 +427,7 @@
 /* intrinsic unsigned long __STREX(unsigned long, unsigned long)  (see intrinsics.h )*/
 static uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
 {
-  __ASM("strex r0, r0, [r1]");
+    __ASM("strex r0, r0, [r1]");
 }
 
 
@@ -438,7 +438,7 @@
  */
 static __INLINE void __CLREX(void)
 {
-  __ASM ("clrex");
+    __ASM ("clrex");
 }
 
 /* intrinsic   unsigned char __CLZ( unsigned long )      (see intrinsics.h) */
@@ -458,7 +458,7 @@
  */
 __attribute__( ( always_inline ) ) static __INLINE void __NOP(void)
 {
-  __ASM volatile ("nop");
+    __ASM volatile ("nop");
 }
 
 
@@ -469,7 +469,7 @@
  */
 __attribute__( ( always_inline ) ) static __INLINE void __WFI(void)
 {
-  __ASM volatile ("wfi");
+    __ASM volatile ("wfi");
 }
 
 
@@ -480,7 +480,7 @@
  */
 __attribute__( ( always_inline ) ) static __INLINE void __WFE(void)
 {
-  __ASM volatile ("wfe");
+    __ASM volatile ("wfe");
 }
 
 
@@ -490,41 +490,41 @@
  */
 __attribute__( ( always_inline ) ) static __INLINE void __SEV(void)
 {
-  __ASM volatile ("sev");
+    __ASM volatile ("sev");
 }
 
 
 /** \brief  Instruction Synchronization Barrier
 
-    Instruction Synchronization Barrier flushes the pipeline in the processor, 
-    so that all instructions following the ISB are fetched from cache or 
+    Instruction Synchronization Barrier flushes the pipeline in the processor,
+    so that all instructions following the ISB are fetched from cache or
     memory, after the instruction has been completed.
  */
 __attribute__( ( always_inline ) ) static __INLINE void __ISB(void)
 {
-  __ASM volatile ("isb");
+    __ASM volatile ("isb");
 }
 
 
 /** \brief  Data Synchronization Barrier
 
-    This function acts as a special kind of Data Memory Barrier. 
+    This function acts as a special kind of Data Memory Barrier.
     It completes when all explicit memory accesses before this instruction complete.
  */
 __attribute__( ( always_inline ) ) static __INLINE void __DSB(void)
 {
-  __ASM volatile ("dsb");
+    __ASM volatile ("dsb");
 }
 
 
 /** \brief  Data Memory Barrier
 
-    This function ensures the apparent order of the explicit memory operations before 
+    This function ensures the apparent order of the explicit memory operations before
     and after the instruction, without ensuring their completion.
  */
 __attribute__( ( always_inline ) ) static __INLINE void __DMB(void)
 {
-  __ASM volatile ("dmb");
+    __ASM volatile ("dmb");
 }
 
 
@@ -537,10 +537,10 @@
  */
 __attribute__( ( always_inline ) ) static __INLINE uint32_t __REV(uint32_t value)
 {
-  uint32_t result;
-  
-  __ASM volatile ("rev %0, %1" : "=r" (result) : "r" (value) );
-  return(result);
+    uint32_t result;
+
+__ASM volatile ("rev %0, %1" : "=r" (result) : "r" (value) );
+    return(result);
 }
 
 
@@ -553,10 +553,10 @@
  */
 __attribute__( ( always_inline ) ) static __INLINE uint32_t __REV16(uint32_t value)
 {
-  uint32_t result;
-  
-  __ASM volatile ("rev16 %0, %1" : "=r" (result) : "r" (value) );
-  return(result);
+    uint32_t result;
+
+__ASM volatile ("rev16 %0, %1" : "=r" (result) : "r" (value) );
+    return(result);
 }
 
 
@@ -569,10 +569,10 @@
  */
 __attribute__( ( always_inline ) ) static __INLINE int32_t __REVSH(int32_t value)
 {
-  uint32_t result;
-  
-  __ASM volatile ("revsh %0, %1" : "=r" (result) : "r" (value) );
-  return(result);
+    uint32_t result;
+
+__ASM volatile ("revsh %0, %1" : "=r" (result) : "r" (value) );
+    return(result);
 }
 
 
@@ -587,10 +587,10 @@
  */
 __attribute__( ( always_inline ) ) static __INLINE uint32_t __RBIT(uint32_t value)
 {
-  uint32_t result;
-  
-   __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
-   return(result);
+    uint32_t result;
+
+__ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
+    return(result);
 }
 
 
@@ -604,9 +604,9 @@
 __attribute__( ( always_inline ) ) static __INLINE uint8_t __LDREXB(volatile uint8_t *addr)
 {
     uint8_t result;
-  
-   __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) );
-   return(result);
+
+__ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) );
+    return(result);
 }
 
 
@@ -620,9 +620,9 @@
 __attribute__( ( always_inline ) ) static __INLINE uint16_t __LDREXH(volatile uint16_t *addr)
 {
     uint16_t result;
-  
-   __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) );
-   return(result);
+
+__ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) );
+    return(result);
 }
 
 
@@ -636,9 +636,9 @@
 __attribute__( ( always_inline ) ) static __INLINE uint32_t __LDREXW(volatile uint32_t *addr)
 {
     uint32_t result;
-  
-   __ASM volatile ("ldrex %0, [%1]" : "=r" (result) : "r" (addr) );
-   return(result);
+
+__ASM volatile ("ldrex %0, [%1]" : "=r" (result) : "r" (addr) );
+    return(result);
 }
 
 
@@ -653,10 +653,10 @@
  */
 __attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
 {
-   uint32_t result;
-  
-   __ASM volatile ("strexb %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) );
-   return(result);
+    uint32_t result;
+
+__ASM volatile ("strexb %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) );
+    return(result);
 }
 
 
@@ -671,10 +671,10 @@
  */
 __attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
 {
-   uint32_t result;
-  
-   __ASM volatile ("strexh %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) );
-   return(result);
+    uint32_t result;
+
+__ASM volatile ("strexh %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) );
+    return(result);
 }
 
 
@@ -689,10 +689,10 @@
  */
 __attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
 {
-   uint32_t result;
-  
-   __ASM volatile ("strex %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) );
-   return(result);
+    uint32_t result;
+
+__ASM volatile ("strex %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) );
+    return(result);
 }
 
 
@@ -703,7 +703,7 @@
  */
 __attribute__( ( always_inline ) ) static __INLINE void __CLREX(void)
 {
-  __ASM volatile ("clrex");
+    __ASM volatile ("clrex");
 }
 
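Taken together, the exclusive-access intrinsics support lock-free read-modify-write sequences: load with LDREX, modify, attempt the store with STREX, and retry if the reservation was broken. A minimal sketch of an atomic add built on the intrinsics above (assuming a Cortex-M3 or later target; __STREXW returns 0 on success, per the definitions in this header):

    #include <stdint.h>

    /* Atomically add 'delta' to *counter, retrying if another access
       broke the exclusive reservation between the load and the store. */
    static inline void atomic_add(volatile uint32_t *counter, uint32_t delta)
    {
        uint32_t v;
        do {
            v = __LDREXW(counter);                     /* exclusive load   */
        } while (__STREXW(v + delta, counter) != 0u);  /* 0 == stored OK   */
    }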
 
@@ -748,10 +748,10 @@
  */
 __attribute__( ( always_inline ) ) static __INLINE uint8_t __CLZ(uint32_t value)
 {
-  uint8_t result;
-  
-  __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
-  return(result);
+    uint8_t result;
+
+__ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
+    return(result);
 }
 
 #endif /* (__CORTEX_M >= 0x03) */