Public fork of mbed-src to add generic stm32f030k6 target

Fork of mbed-src by mbed official

Committer: ersatzavian
Date: Tue Jul 21 14:09:45 2015 -0700
Revision: 596:d25a30803853
Parent: 13:0645d8841f51
added missing bracket probably I hope

Who changed what in which revision?

User        Revision            Line number   New contents of line
bogdanm 13:0645d8841f51 1 /**************************************************************************//**
bogdanm 13:0645d8841f51 2 * @file core_cmInstr.h
bogdanm 13:0645d8841f51 3 * @brief CMSIS Cortex-M Core Instruction Access Header File
bogdanm 13:0645d8841f51 4 * @version V3.20
bogdanm 13:0645d8841f51 5 * @date 05. March 2013
bogdanm 13:0645d8841f51 6 *
bogdanm 13:0645d8841f51 7 * @note
bogdanm 13:0645d8841f51 8 *
bogdanm 13:0645d8841f51 9 ******************************************************************************/
bogdanm 13:0645d8841f51 10 /* Copyright (c) 2009 - 2013 ARM LIMITED
bogdanm 13:0645d8841f51 11
bogdanm 13:0645d8841f51 12 All rights reserved.
bogdanm 13:0645d8841f51 13 Redistribution and use in source and binary forms, with or without
bogdanm 13:0645d8841f51 14 modification, are permitted provided that the following conditions are met:
bogdanm 13:0645d8841f51 15 - Redistributions of source code must retain the above copyright
bogdanm 13:0645d8841f51 16 notice, this list of conditions and the following disclaimer.
bogdanm 13:0645d8841f51 17 - Redistributions in binary form must reproduce the above copyright
bogdanm 13:0645d8841f51 18 notice, this list of conditions and the following disclaimer in the
bogdanm 13:0645d8841f51 19 documentation and/or other materials provided with the distribution.
bogdanm 13:0645d8841f51 20 - Neither the name of ARM nor the names of its contributors may be used
bogdanm 13:0645d8841f51 21 to endorse or promote products derived from this software without
bogdanm 13:0645d8841f51 22 specific prior written permission.
bogdanm 13:0645d8841f51 23 *
bogdanm 13:0645d8841f51 24 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
bogdanm 13:0645d8841f51 25 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
bogdanm 13:0645d8841f51 26 IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
bogdanm 13:0645d8841f51 27 ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
bogdanm 13:0645d8841f51 28 LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
bogdanm 13:0645d8841f51 29 CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
bogdanm 13:0645d8841f51 30 SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
bogdanm 13:0645d8841f51 31 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
bogdanm 13:0645d8841f51 32 CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
bogdanm 13:0645d8841f51 33 ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
bogdanm 13:0645d8841f51 34 POSSIBILITY OF SUCH DAMAGE.
bogdanm 13:0645d8841f51 35 ---------------------------------------------------------------------------*/
bogdanm 13:0645d8841f51 36
bogdanm 13:0645d8841f51 37
bogdanm 13:0645d8841f51 38 #ifndef __CORE_CMINSTR_H
bogdanm 13:0645d8841f51 39 #define __CORE_CMINSTR_H
bogdanm 13:0645d8841f51 40
bogdanm 13:0645d8841f51 41
bogdanm 13:0645d8841f51 42 /* ########################## Core Instruction Access ######################### */
bogdanm 13:0645d8841f51 43 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
bogdanm 13:0645d8841f51 44 Access to dedicated instructions
bogdanm 13:0645d8841f51 45 @{
bogdanm 13:0645d8841f51 46 */
bogdanm 13:0645d8841f51 47
bogdanm 13:0645d8841f51 48 #if defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
bogdanm 13:0645d8841f51 49 /* ARM armcc specific functions */
bogdanm 13:0645d8841f51 50
bogdanm 13:0645d8841f51 51 #if (__ARMCC_VERSION < 400677)
bogdanm 13:0645d8841f51 52 #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
bogdanm 13:0645d8841f51 53 #endif
bogdanm 13:0645d8841f51 54
bogdanm 13:0645d8841f51 55
bogdanm 13:0645d8841f51 56 /** \brief No Operation
bogdanm 13:0645d8841f51 57
bogdanm 13:0645d8841f51 58 No Operation does nothing. This instruction can be used for code alignment purposes.
bogdanm 13:0645d8841f51 59 */
bogdanm 13:0645d8841f51 60 #define __NOP __nop
bogdanm 13:0645d8841f51 61
bogdanm 13:0645d8841f51 62
bogdanm 13:0645d8841f51 63 /** \brief Wait For Interrupt
bogdanm 13:0645d8841f51 64
bogdanm 13:0645d8841f51 65 Wait For Interrupt is a hint instruction that suspends execution
bogdanm 13:0645d8841f51 66 until one of a number of events occurs.
bogdanm 13:0645d8841f51 67 */
bogdanm 13:0645d8841f51 68 #define __WFI __wfi
bogdanm 13:0645d8841f51 69
bogdanm 13:0645d8841f51 70
bogdanm 13:0645d8841f51 71 /** \brief Wait For Event
bogdanm 13:0645d8841f51 72
bogdanm 13:0645d8841f51 73 Wait For Event is a hint instruction that permits the processor to enter
bogdanm 13:0645d8841f51 74 a low-power state until one of a number of events occurs.
bogdanm 13:0645d8841f51 75 */
bogdanm 13:0645d8841f51 76 #define __WFE __wfe
bogdanm 13:0645d8841f51 77
bogdanm 13:0645d8841f51 78
bogdanm 13:0645d8841f51 79 /** \brief Send Event
bogdanm 13:0645d8841f51 80
bogdanm 13:0645d8841f51 81 Send Event is a hint instruction. It causes an event to be signaled to the CPU.
bogdanm 13:0645d8841f51 82 */
bogdanm 13:0645d8841f51 83 #define __SEV __sev
bogdanm 13:0645d8841f51 84
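/* Editor's note: illustrative usage sketch, not part of the original CMSIS header.
   A common low-power idle pattern simply sleeps until the next interrupt:

       for (;;) {
           __WFI();                 // core sleeps; any enabled interrupt wakes it
           handle_pending_work();   // hypothetical application handler run after wake-up
       }

   __WFE()/__SEV() can be used the same way when an interrupt handler or another
   context needs to signal a waiting loop via the event register. */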
bogdanm 13:0645d8841f51 85
bogdanm 13:0645d8841f51 86 /** \brief Instruction Synchronization Barrier
bogdanm 13:0645d8841f51 87
bogdanm 13:0645d8841f51 88 Instruction Synchronization Barrier flushes the pipeline in the processor,
bogdanm 13:0645d8841f51 89 so that all instructions following the ISB are fetched from cache or
bogdanm 13:0645d8841f51 90 memory, after the instruction has been completed.
bogdanm 13:0645d8841f51 91 */
bogdanm 13:0645d8841f51 92 #define __ISB() __isb(0xF)
bogdanm 13:0645d8841f51 93
bogdanm 13:0645d8841f51 94
bogdanm 13:0645d8841f51 95 /** \brief Data Synchronization Barrier
bogdanm 13:0645d8841f51 96
bogdanm 13:0645d8841f51 97 This function acts as a special kind of Data Memory Barrier.
bogdanm 13:0645d8841f51 98 It completes when all explicit memory accesses before this instruction complete.
bogdanm 13:0645d8841f51 99 */
bogdanm 13:0645d8841f51 100 #define __DSB() __dsb(0xF)
bogdanm 13:0645d8841f51 101
bogdanm 13:0645d8841f51 102
bogdanm 13:0645d8841f51 103 /** \brief Data Memory Barrier
bogdanm 13:0645d8841f51 104
bogdanm 13:0645d8841f51 105 This function ensures the apparent order of the explicit memory operations before
bogdanm 13:0645d8841f51 106 and after the instruction, without ensuring their completion.
bogdanm 13:0645d8841f51 107 */
bogdanm 13:0645d8841f51 108 #define __DMB() __dmb(0xF)
bogdanm 13:0645d8841f51 109
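/* Editor's note: illustrative sketch, not part of the original CMSIS header.
   A typical use of the barriers is ordering a buffer update against a peripheral start:

       dma_buffer[0] = sample;      // hypothetical shared buffer
       __DSB();                     // ensure the store has completed
       start_dma_transfer();        // hypothetical call that lets hardware read the buffer

   __ISB() is added when the change affects instruction fetch (e.g. after remapping
   memory), and __DMB() is enough when only the relative order of data accesses matters. */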
bogdanm 13:0645d8841f51 110
bogdanm 13:0645d8841f51 111 /** \brief Reverse byte order (32 bit)
bogdanm 13:0645d8841f51 112
bogdanm 13:0645d8841f51 113 This function reverses the byte order in an integer value.
bogdanm 13:0645d8841f51 114
bogdanm 13:0645d8841f51 115 \param [in] value Value to reverse
bogdanm 13:0645d8841f51 116 \return Reversed value
bogdanm 13:0645d8841f51 117 */
bogdanm 13:0645d8841f51 118 #define __REV __rev
bogdanm 13:0645d8841f51 119
bogdanm 13:0645d8841f51 120
bogdanm 13:0645d8841f51 121 /** \brief Reverse byte order (16 bit)
bogdanm 13:0645d8841f51 122
bogdanm 13:0645d8841f51 123 This function reverses the byte order in two unsigned short values.
bogdanm 13:0645d8841f51 124
bogdanm 13:0645d8841f51 125 \param [in] value Value to reverse
bogdanm 13:0645d8841f51 126 \return Reversed value
bogdanm 13:0645d8841f51 127 */
bogdanm 13:0645d8841f51 128 #ifndef __NO_EMBEDDED_ASM
bogdanm 13:0645d8841f51 129 __attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
bogdanm 13:0645d8841f51 130 {
bogdanm 13:0645d8841f51 131 rev16 r0, r0
bogdanm 13:0645d8841f51 132 bx lr
bogdanm 13:0645d8841f51 133 }
bogdanm 13:0645d8841f51 134 #endif
bogdanm 13:0645d8841f51 135
bogdanm 13:0645d8841f51 136 /** \brief Reverse byte order in signed short value
bogdanm 13:0645d8841f51 137
bogdanm 13:0645d8841f51 138 This function reverses the byte order in a signed short value with sign extension to integer.
bogdanm 13:0645d8841f51 139
bogdanm 13:0645d8841f51 140 \param [in] value Value to reverse
bogdanm 13:0645d8841f51 141 \return Reversed value
bogdanm 13:0645d8841f51 142 */
bogdanm 13:0645d8841f51 143 #ifndef __NO_EMBEDDED_ASM
bogdanm 13:0645d8841f51 144 __attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
bogdanm 13:0645d8841f51 145 {
bogdanm 13:0645d8841f51 146 revsh r0, r0
bogdanm 13:0645d8841f51 147 bx lr
bogdanm 13:0645d8841f51 148 }
bogdanm 13:0645d8841f51 149 #endif
bogdanm 13:0645d8841f51 150
bogdanm 13:0645d8841f51 151
bogdanm 13:0645d8841f51 152 /** \brief Rotate Right in unsigned value (32 bit)
bogdanm 13:0645d8841f51 153
bogdanm 13:0645d8841f51 154 This function rotates a value right by a specified number of bits and returns the rotated value.
bogdanm 13:0645d8841f51 155
bogdanm 13:0645d8841f51 156 \param [in] value Value to rotate
bogdanm 13:0645d8841f51 157 \param [in] shift Number of bits to rotate
bogdanm 13:0645d8841f51 158 \return Rotated value
bogdanm 13:0645d8841f51 159 */
bogdanm 13:0645d8841f51 160 #define __ROR __ror
bogdanm 13:0645d8841f51 161
bogdanm 13:0645d8841f51 162
bogdanm 13:0645d8841f51 163 /** \brief Breakpoint
bogdanm 13:0645d8841f51 164
bogdanm 13:0645d8841f51 165 This function causes the processor to enter Debug state.
bogdanm 13:0645d8841f51 166 Debug tools can use this to investigate system state when the instruction at a particular address is reached.
bogdanm 13:0645d8841f51 167
bogdanm 13:0645d8841f51 168 \param [in] value is ignored by the processor.
bogdanm 13:0645d8841f51 169 If required, a debugger can use it to store additional information about the breakpoint.
bogdanm 13:0645d8841f51 170 */
bogdanm 13:0645d8841f51 171 #define __BKPT(value) __breakpoint(value)
bogdanm 13:0645d8841f51 172
bogdanm 13:0645d8841f51 173
bogdanm 13:0645d8841f51 174 #if (__CORTEX_M >= 0x03)
bogdanm 13:0645d8841f51 175
bogdanm 13:0645d8841f51 176 /** \brief Reverse bit order of value
bogdanm 13:0645d8841f51 177
bogdanm 13:0645d8841f51 178 This function reverses the bit order of the given value.
bogdanm 13:0645d8841f51 179
bogdanm 13:0645d8841f51 180 \param [in] value Value to reverse
bogdanm 13:0645d8841f51 181 \return Reversed value
bogdanm 13:0645d8841f51 182 */
bogdanm 13:0645d8841f51 183 #define __RBIT __rbit
bogdanm 13:0645d8841f51 184
bogdanm 13:0645d8841f51 185
bogdanm 13:0645d8841f51 186 /** \brief LDR Exclusive (8 bit)
bogdanm 13:0645d8841f51 187
bogdanm 13:0645d8841f51 188 This function performs an exclusive LDR command for 8 bit values.
bogdanm 13:0645d8841f51 189
bogdanm 13:0645d8841f51 190 \param [in] ptr Pointer to data
bogdanm 13:0645d8841f51 191 \return value of type uint8_t at (*ptr)
bogdanm 13:0645d8841f51 192 */
bogdanm 13:0645d8841f51 193 #define __LDREXB(ptr) ((uint8_t ) __ldrex(ptr))
bogdanm 13:0645d8841f51 194
bogdanm 13:0645d8841f51 195
bogdanm 13:0645d8841f51 196 /** \brief LDR Exclusive (16 bit)
bogdanm 13:0645d8841f51 197
bogdanm 13:0645d8841f51 198 This function performs an exclusive LDR command for 16 bit values.
bogdanm 13:0645d8841f51 199
bogdanm 13:0645d8841f51 200 \param [in] ptr Pointer to data
bogdanm 13:0645d8841f51 201 \return value of type uint16_t at (*ptr)
bogdanm 13:0645d8841f51 202 */
bogdanm 13:0645d8841f51 203 #define __LDREXH(ptr) ((uint16_t) __ldrex(ptr))
bogdanm 13:0645d8841f51 204
bogdanm 13:0645d8841f51 205
bogdanm 13:0645d8841f51 206 /** \brief LDR Exclusive (32 bit)
bogdanm 13:0645d8841f51 207
bogdanm 13:0645d8841f51 208 This function performs an exclusive LDR command for 32 bit values.
bogdanm 13:0645d8841f51 209
bogdanm 13:0645d8841f51 210 \param [in] ptr Pointer to data
bogdanm 13:0645d8841f51 211 \return value of type uint32_t at (*ptr)
bogdanm 13:0645d8841f51 212 */
bogdanm 13:0645d8841f51 213 #define __LDREXW(ptr) ((uint32_t ) __ldrex(ptr))
bogdanm 13:0645d8841f51 214
bogdanm 13:0645d8841f51 215
bogdanm 13:0645d8841f51 216 /** \brief STR Exclusive (8 bit)
bogdanm 13:0645d8841f51 217
bogdanm 13:0645d8841f51 218 This function performs an exclusive STR command for 8 bit values.
bogdanm 13:0645d8841f51 219
bogdanm 13:0645d8841f51 220 \param [in] value Value to store
bogdanm 13:0645d8841f51 221 \param [in] ptr Pointer to location
bogdanm 13:0645d8841f51 222 \return 0 Function succeeded
bogdanm 13:0645d8841f51 223 \return 1 Function failed
bogdanm 13:0645d8841f51 224 */
bogdanm 13:0645d8841f51 225 #define __STREXB(value, ptr) __strex(value, ptr)
bogdanm 13:0645d8841f51 226
bogdanm 13:0645d8841f51 227
bogdanm 13:0645d8841f51 228 /** \brief STR Exclusive (16 bit)
bogdanm 13:0645d8841f51 229
bogdanm 13:0645d8841f51 230 This function performs an exclusive STR command for 16 bit values.
bogdanm 13:0645d8841f51 231
bogdanm 13:0645d8841f51 232 \param [in] value Value to store
bogdanm 13:0645d8841f51 233 \param [in] ptr Pointer to location
bogdanm 13:0645d8841f51 234 \return 0 Function succeeded
bogdanm 13:0645d8841f51 235 \return 1 Function failed
bogdanm 13:0645d8841f51 236 */
bogdanm 13:0645d8841f51 237 #define __STREXH(value, ptr) __strex(value, ptr)
bogdanm 13:0645d8841f51 238
bogdanm 13:0645d8841f51 239
bogdanm 13:0645d8841f51 240 /** \brief STR Exclusive (32 bit)
bogdanm 13:0645d8841f51 241
bogdanm 13:0645d8841f51 242 This function performs an exclusive STR command for 32 bit values.
bogdanm 13:0645d8841f51 243
bogdanm 13:0645d8841f51 244 \param [in] value Value to store
bogdanm 13:0645d8841f51 245 \param [in] ptr Pointer to location
bogdanm 13:0645d8841f51 246 \return 0 Function succeeded
bogdanm 13:0645d8841f51 247 \return 1 Function failed
bogdanm 13:0645d8841f51 248 */
bogdanm 13:0645d8841f51 249 #define __STREXW(value, ptr) __strex(value, ptr)
bogdanm 13:0645d8841f51 250
bogdanm 13:0645d8841f51 251
bogdanm 13:0645d8841f51 252 /** \brief Remove the exclusive lock
bogdanm 13:0645d8841f51 253
bogdanm 13:0645d8841f51 254 This function removes the exclusive lock which is created by LDREX.
bogdanm 13:0645d8841f51 255
bogdanm 13:0645d8841f51 256 */
bogdanm 13:0645d8841f51 257 #define __CLREX __clrex
bogdanm 13:0645d8841f51 258
bogdanm 13:0645d8841f51 259
bogdanm 13:0645d8841f51 260 /** \brief Signed Saturate
bogdanm 13:0645d8841f51 261
bogdanm 13:0645d8841f51 262 This function saturates a signed value.
bogdanm 13:0645d8841f51 263
bogdanm 13:0645d8841f51 264 \param [in] value Value to be saturated
bogdanm 13:0645d8841f51 265 \param [in] sat Bit position to saturate to (1..32)
bogdanm 13:0645d8841f51 266 \return Saturated value
bogdanm 13:0645d8841f51 267 */
bogdanm 13:0645d8841f51 268 #define __SSAT __ssat
bogdanm 13:0645d8841f51 269
bogdanm 13:0645d8841f51 270
bogdanm 13:0645d8841f51 271 /** \brief Unsigned Saturate
bogdanm 13:0645d8841f51 272
bogdanm 13:0645d8841f51 273 This function saturates an unsigned value.
bogdanm 13:0645d8841f51 274
bogdanm 13:0645d8841f51 275 \param [in] value Value to be saturated
bogdanm 13:0645d8841f51 276 \param [in] sat Bit position to saturate to (0..31)
bogdanm 13:0645d8841f51 277 \return Saturated value
bogdanm 13:0645d8841f51 278 */
bogdanm 13:0645d8841f51 279 #define __USAT __usat
bogdanm 13:0645d8841f51 280
bogdanm 13:0645d8841f51 281
bogdanm 13:0645d8841f51 282 /** \brief Count leading zeros
bogdanm 13:0645d8841f51 283
bogdanm 13:0645d8841f51 284 This function counts the number of leading zeros of a data value.
bogdanm 13:0645d8841f51 285
bogdanm 13:0645d8841f51 286 \param [in] value Value to count the leading zeros
bogdanm 13:0645d8841f51 287 \return number of leading zeros in value
bogdanm 13:0645d8841f51 288 */
bogdanm 13:0645d8841f51 289 #define __CLZ __clz
bogdanm 13:0645d8841f51 290
bogdanm 13:0645d8841f51 291 #endif /* (__CORTEX_M >= 0x03) */
bogdanm 13:0645d8841f51 292
bogdanm 13:0645d8841f51 293
bogdanm 13:0645d8841f51 294
bogdanm 13:0645d8841f51 295 #elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
bogdanm 13:0645d8841f51 296 /* IAR iccarm specific functions */
bogdanm 13:0645d8841f51 297
bogdanm 13:0645d8841f51 298 #include <cmsis_iar.h>
bogdanm 13:0645d8841f51 299
bogdanm 13:0645d8841f51 300
bogdanm 13:0645d8841f51 301 #elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
bogdanm 13:0645d8841f51 302 /* TI CCS specific functions */
bogdanm 13:0645d8841f51 303
bogdanm 13:0645d8841f51 304 #include <cmsis_ccs.h>
bogdanm 13:0645d8841f51 305
bogdanm 13:0645d8841f51 306
bogdanm 13:0645d8841f51 307 #elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
bogdanm 13:0645d8841f51 308 /* GNU gcc specific functions */
bogdanm 13:0645d8841f51 309
bogdanm 13:0645d8841f51 310 /* Define macros for porting to both thumb1 and thumb2.
bogdanm 13:0645d8841f51 311 * For thumb1, use a low register (r0-r7), specified by constraint "l".
bogdanm 13:0645d8841f51 312 * Otherwise, use general registers, specified by constraint "r". */
bogdanm 13:0645d8841f51 313 #if defined (__thumb__) && !defined (__thumb2__)
bogdanm 13:0645d8841f51 314 #define __CMSIS_GCC_OUT_REG(r) "=l" (r)
bogdanm 13:0645d8841f51 315 #define __CMSIS_GCC_USE_REG(r) "l" (r)
bogdanm 13:0645d8841f51 316 #else
bogdanm 13:0645d8841f51 317 #define __CMSIS_GCC_OUT_REG(r) "=r" (r)
bogdanm 13:0645d8841f51 318 #define __CMSIS_GCC_USE_REG(r) "r" (r)
bogdanm 13:0645d8841f51 319 #endif
bogdanm 13:0645d8841f51 320
bogdanm 13:0645d8841f51 321 /** \brief No Operation
bogdanm 13:0645d8841f51 322
bogdanm 13:0645d8841f51 323 No Operation does nothing. This instruction can be used for code alignment purposes.
bogdanm 13:0645d8841f51 324 */
bogdanm 13:0645d8841f51 325 __attribute__( ( always_inline ) ) __STATIC_INLINE void __NOP(void)
bogdanm 13:0645d8841f51 326 {
bogdanm 13:0645d8841f51 327 __ASM volatile ("nop");
bogdanm 13:0645d8841f51 328 }
bogdanm 13:0645d8841f51 329
bogdanm 13:0645d8841f51 330
bogdanm 13:0645d8841f51 331 /** \brief Wait For Interrupt
bogdanm 13:0645d8841f51 332
bogdanm 13:0645d8841f51 333 Wait For Interrupt is a hint instruction that suspends execution
bogdanm 13:0645d8841f51 334 until one of a number of events occurs.
bogdanm 13:0645d8841f51 335 */
bogdanm 13:0645d8841f51 336 __attribute__( ( always_inline ) ) __STATIC_INLINE void __WFI(void)
bogdanm 13:0645d8841f51 337 {
bogdanm 13:0645d8841f51 338 __ASM volatile ("wfi");
bogdanm 13:0645d8841f51 339 }
bogdanm 13:0645d8841f51 340
bogdanm 13:0645d8841f51 341
bogdanm 13:0645d8841f51 342 /** \brief Wait For Event
bogdanm 13:0645d8841f51 343
bogdanm 13:0645d8841f51 344 Wait For Event is a hint instruction that permits the processor to enter
bogdanm 13:0645d8841f51 345 a low-power state until one of a number of events occurs.
bogdanm 13:0645d8841f51 346 */
bogdanm 13:0645d8841f51 347 __attribute__( ( always_inline ) ) __STATIC_INLINE void __WFE(void)
bogdanm 13:0645d8841f51 348 {
bogdanm 13:0645d8841f51 349 __ASM volatile ("wfe");
bogdanm 13:0645d8841f51 350 }
bogdanm 13:0645d8841f51 351
bogdanm 13:0645d8841f51 352
bogdanm 13:0645d8841f51 353 /** \brief Send Event
bogdanm 13:0645d8841f51 354
bogdanm 13:0645d8841f51 355 Send Event is a hint instruction. It causes an event to be signaled to the CPU.
bogdanm 13:0645d8841f51 356 */
bogdanm 13:0645d8841f51 357 __attribute__( ( always_inline ) ) __STATIC_INLINE void __SEV(void)
bogdanm 13:0645d8841f51 358 {
bogdanm 13:0645d8841f51 359 __ASM volatile ("sev");
bogdanm 13:0645d8841f51 360 }
bogdanm 13:0645d8841f51 361
bogdanm 13:0645d8841f51 362
bogdanm 13:0645d8841f51 363 /** \brief Instruction Synchronization Barrier
bogdanm 13:0645d8841f51 364
bogdanm 13:0645d8841f51 365 Instruction Synchronization Barrier flushes the pipeline in the processor,
bogdanm 13:0645d8841f51 366 so that all instructions following the ISB are fetched from cache or
bogdanm 13:0645d8841f51 367 memory, after the instruction has been completed.
bogdanm 13:0645d8841f51 368 */
bogdanm 13:0645d8841f51 369 __attribute__( ( always_inline ) ) __STATIC_INLINE void __ISB(void)
bogdanm 13:0645d8841f51 370 {
bogdanm 13:0645d8841f51 371 __ASM volatile ("isb");
bogdanm 13:0645d8841f51 372 }
bogdanm 13:0645d8841f51 373
bogdanm 13:0645d8841f51 374
bogdanm 13:0645d8841f51 375 /** \brief Data Synchronization Barrier
bogdanm 13:0645d8841f51 376
bogdanm 13:0645d8841f51 377 This function acts as a special kind of Data Memory Barrier.
bogdanm 13:0645d8841f51 378 It completes when all explicit memory accesses before this instruction complete.
bogdanm 13:0645d8841f51 379 */
bogdanm 13:0645d8841f51 380 __attribute__( ( always_inline ) ) __STATIC_INLINE void __DSB(void)
bogdanm 13:0645d8841f51 381 {
bogdanm 13:0645d8841f51 382 __ASM volatile ("dsb");
bogdanm 13:0645d8841f51 383 }
bogdanm 13:0645d8841f51 384
bogdanm 13:0645d8841f51 385
bogdanm 13:0645d8841f51 386 /** \brief Data Memory Barrier
bogdanm 13:0645d8841f51 387
bogdanm 13:0645d8841f51 388 This function ensures the apparent order of the explicit memory operations before
bogdanm 13:0645d8841f51 389 and after the instruction, without ensuring their completion.
bogdanm 13:0645d8841f51 390 */
bogdanm 13:0645d8841f51 391 __attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
bogdanm 13:0645d8841f51 392 {
bogdanm 13:0645d8841f51 393 __ASM volatile ("dmb");
bogdanm 13:0645d8841f51 394 }
bogdanm 13:0645d8841f51 395
bogdanm 13:0645d8841f51 396
bogdanm 13:0645d8841f51 397 /** \brief Reverse byte order (32 bit)
bogdanm 13:0645d8841f51 398
bogdanm 13:0645d8841f51 399 This function reverses the byte order in an integer value.
bogdanm 13:0645d8841f51 400
bogdanm 13:0645d8841f51 401 \param [in] value Value to reverse
bogdanm 13:0645d8841f51 402 \return Reversed value
bogdanm 13:0645d8841f51 403 */
bogdanm 13:0645d8841f51 404 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
bogdanm 13:0645d8841f51 405 {
bogdanm 13:0645d8841f51 406 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
bogdanm 13:0645d8841f51 407 return __builtin_bswap32(value);
bogdanm 13:0645d8841f51 408 #else
bogdanm 13:0645d8841f51 409 uint32_t result;
bogdanm 13:0645d8841f51 410
bogdanm 13:0645d8841f51 411 __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
bogdanm 13:0645d8841f51 412 return(result);
bogdanm 13:0645d8841f51 413 #endif
bogdanm 13:0645d8841f51 414 }
bogdanm 13:0645d8841f51 415
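/* Editor's note: illustrative example, not part of the original CMSIS header.
   __REV is the usual building block for endianness conversion, e.g. turning a
   big-endian (network order) word into host order on this little-endian core:

       uint32_t be = 0x12345678u;
       uint32_t le = __REV(be);                           // le == 0x78563412
       uint16_t h  = (uint16_t)__REVSH((int16_t)0x1234);  // h  == 0x3412

   __REV16 instead swaps the bytes within each 16-bit half of the word. */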
bogdanm 13:0645d8841f51 416
bogdanm 13:0645d8841f51 417 /** \brief Reverse byte order (16 bit)
bogdanm 13:0645d8841f51 418
bogdanm 13:0645d8841f51 419 This function reverses the byte order in two unsigned short values.
bogdanm 13:0645d8841f51 420
bogdanm 13:0645d8841f51 421 \param [in] value Value to reverse
bogdanm 13:0645d8841f51 422 \return Reversed value
bogdanm 13:0645d8841f51 423 */
bogdanm 13:0645d8841f51 424 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t value)
bogdanm 13:0645d8841f51 425 {
bogdanm 13:0645d8841f51 426 uint32_t result;
bogdanm 13:0645d8841f51 427
bogdanm 13:0645d8841f51 428 __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
bogdanm 13:0645d8841f51 429 return(result);
bogdanm 13:0645d8841f51 430 }
bogdanm 13:0645d8841f51 431
bogdanm 13:0645d8841f51 432
bogdanm 13:0645d8841f51 433 /** \brief Reverse byte order in signed short value
bogdanm 13:0645d8841f51 434
bogdanm 13:0645d8841f51 435 This function reverses the byte order in a signed short value with sign extension to integer.
bogdanm 13:0645d8841f51 436
bogdanm 13:0645d8841f51 437 \param [in] value Value to reverse
bogdanm 13:0645d8841f51 438 \return Reversed value
bogdanm 13:0645d8841f51 439 */
bogdanm 13:0645d8841f51 440 __attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
bogdanm 13:0645d8841f51 441 {
bogdanm 13:0645d8841f51 442 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
bogdanm 13:0645d8841f51 443 return (short)__builtin_bswap16(value);
bogdanm 13:0645d8841f51 444 #else
bogdanm 13:0645d8841f51 445 uint32_t result;
bogdanm 13:0645d8841f51 446
bogdanm 13:0645d8841f51 447 __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
bogdanm 13:0645d8841f51 448 return(result);
bogdanm 13:0645d8841f51 449 #endif
bogdanm 13:0645d8841f51 450 }
bogdanm 13:0645d8841f51 451
bogdanm 13:0645d8841f51 452
bogdanm 13:0645d8841f51 453 /** \brief Rotate Right in unsigned value (32 bit)
bogdanm 13:0645d8841f51 454
bogdanm 13:0645d8841f51 455 This function rotates a value right by a specified number of bits and returns the rotated value.
bogdanm 13:0645d8841f51 456
bogdanm 13:0645d8841f51 457 \param [in] op1 Value to rotate
bogdanm 13:0645d8841f51 458 \param [in] op2 Number of bits to rotate
bogdanm 13:0645d8841f51 459 \return Rotated value
bogdanm 13:0645d8841f51 460 */
bogdanm 13:0645d8841f51 461 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 462 {
bogdanm 13:0645d8841f51 463 return (op1 >> op2) | (op1 << (32 - op2));
bogdanm 13:0645d8841f51 464 }
bogdanm 13:0645d8841f51 465
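/* Editor's note: illustrative example, not part of the original CMSIS header.

       uint32_t r = __ROR(0x12345678u, 8);   // r == 0x78123456

   Note that the C fall-back above assumes 0 < op2 < 32; a rotate count of 0 would
   shift by 32 bits, which is undefined behaviour in C. */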
bogdanm 13:0645d8841f51 466
bogdanm 13:0645d8841f51 467 /** \brief Breakpoint
bogdanm 13:0645d8841f51 468
bogdanm 13:0645d8841f51 469 This function causes the processor to enter Debug state.
bogdanm 13:0645d8841f51 470 Debug tools can use this to investigate system state when the instruction at a particular address is reached.
bogdanm 13:0645d8841f51 471
bogdanm 13:0645d8841f51 472 \param [in] value is ignored by the processor.
bogdanm 13:0645d8841f51 473 If required, a debugger can use it to store additional information about the breakpoint.
bogdanm 13:0645d8841f51 474 */
bogdanm 13:0645d8841f51 475 #define __BKPT(value) __ASM volatile ("bkpt "#value)
bogdanm 13:0645d8841f51 476
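/* Editor's note: illustrative example, not part of the original CMSIS header.
   __BKPT is handy in error paths while a debugger is attached:

       if (config_error)   // hypothetical condition
           __BKPT(0);      // halt here; the immediate is only visible to the debugger

   Without a debugger attached, executing BKPT typically escalates to a HardFault. */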
bogdanm 13:0645d8841f51 477
bogdanm 13:0645d8841f51 478 #if (__CORTEX_M >= 0x03)
bogdanm 13:0645d8841f51 479
bogdanm 13:0645d8841f51 480 /** \brief Reverse bit order of value
bogdanm 13:0645d8841f51 481
bogdanm 13:0645d8841f51 482 This function reverses the bit order of the given value.
bogdanm 13:0645d8841f51 483
bogdanm 13:0645d8841f51 484 \param [in] value Value to reverse
bogdanm 13:0645d8841f51 485 \return Reversed value
bogdanm 13:0645d8841f51 486 */
bogdanm 13:0645d8841f51 487 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
bogdanm 13:0645d8841f51 488 {
bogdanm 13:0645d8841f51 489 uint32_t result;
bogdanm 13:0645d8841f51 490
bogdanm 13:0645d8841f51 491 __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
bogdanm 13:0645d8841f51 492 return(result);
bogdanm 13:0645d8841f51 493 }
bogdanm 13:0645d8841f51 494
bogdanm 13:0645d8841f51 495
bogdanm 13:0645d8841f51 496 /** \brief LDR Exclusive (8 bit)
bogdanm 13:0645d8841f51 497
bogdanm 13:0645d8841f51 498 This function performs an exclusive LDR command for 8 bit values.
bogdanm 13:0645d8841f51 499
bogdanm 13:0645d8841f51 500 \param [in] ptr Pointer to data
bogdanm 13:0645d8841f51 501 \return value of type uint8_t at (*ptr)
bogdanm 13:0645d8841f51 502 */
bogdanm 13:0645d8841f51 503 __attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
bogdanm 13:0645d8841f51 504 {
bogdanm 13:0645d8841f51 505 uint32_t result;
bogdanm 13:0645d8841f51 506
bogdanm 13:0645d8841f51 507 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
bogdanm 13:0645d8841f51 508 __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
bogdanm 13:0645d8841f51 509 #else
bogdanm 13:0645d8841f51 510 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
bogdanm 13:0645d8841f51 511 accepted by the assembler, so the following less efficient pattern has to be used.
bogdanm 13:0645d8841f51 512 */
bogdanm 13:0645d8841f51 513 __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
bogdanm 13:0645d8841f51 514 #endif
bogdanm 13:0645d8841f51 515 return(result);
bogdanm 13:0645d8841f51 516 }
bogdanm 13:0645d8841f51 517
bogdanm 13:0645d8841f51 518
bogdanm 13:0645d8841f51 519 /** \brief LDR Exclusive (16 bit)
bogdanm 13:0645d8841f51 520
bogdanm 13:0645d8841f51 521 This function performs an exclusive LDR command for 16 bit values.
bogdanm 13:0645d8841f51 522
bogdanm 13:0645d8841f51 523 \param [in] ptr Pointer to data
bogdanm 13:0645d8841f51 524 \return value of type uint16_t at (*ptr)
bogdanm 13:0645d8841f51 525 */
bogdanm 13:0645d8841f51 526 __attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
bogdanm 13:0645d8841f51 527 {
bogdanm 13:0645d8841f51 528 uint32_t result;
bogdanm 13:0645d8841f51 529
bogdanm 13:0645d8841f51 530 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
bogdanm 13:0645d8841f51 531 __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
bogdanm 13:0645d8841f51 532 #else
bogdanm 13:0645d8841f51 533 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
bogdanm 13:0645d8841f51 534 accepted by the assembler, so the following less efficient pattern has to be used.
bogdanm 13:0645d8841f51 535 */
bogdanm 13:0645d8841f51 536 __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
bogdanm 13:0645d8841f51 537 #endif
bogdanm 13:0645d8841f51 538 return(result);
bogdanm 13:0645d8841f51 539 }
bogdanm 13:0645d8841f51 540
bogdanm 13:0645d8841f51 541
bogdanm 13:0645d8841f51 542 /** \brief LDR Exclusive (32 bit)
bogdanm 13:0645d8841f51 543
bogdanm 13:0645d8841f51 544 This function performs an exclusive LDR command for 32 bit values.
bogdanm 13:0645d8841f51 545
bogdanm 13:0645d8841f51 546 \param [in] ptr Pointer to data
bogdanm 13:0645d8841f51 547 \return value of type uint32_t at (*ptr)
bogdanm 13:0645d8841f51 548 */
bogdanm 13:0645d8841f51 549 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
bogdanm 13:0645d8841f51 550 {
bogdanm 13:0645d8841f51 551 uint32_t result;
bogdanm 13:0645d8841f51 552
bogdanm 13:0645d8841f51 553 __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
bogdanm 13:0645d8841f51 554 return(result);
bogdanm 13:0645d8841f51 555 }
bogdanm 13:0645d8841f51 556
bogdanm 13:0645d8841f51 557
bogdanm 13:0645d8841f51 558 /** \brief STR Exclusive (8 bit)
bogdanm 13:0645d8841f51 559
bogdanm 13:0645d8841f51 560 This function performs an exclusive STR command for 8 bit values.
bogdanm 13:0645d8841f51 561
bogdanm 13:0645d8841f51 562 \param [in] value Value to store
bogdanm 13:0645d8841f51 563 \param [in] ptr Pointer to location
bogdanm 13:0645d8841f51 564 \return 0 Function succeeded
bogdanm 13:0645d8841f51 565 \return 1 Function failed
bogdanm 13:0645d8841f51 566 */
bogdanm 13:0645d8841f51 567 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
bogdanm 13:0645d8841f51 568 {
bogdanm 13:0645d8841f51 569 uint32_t result;
bogdanm 13:0645d8841f51 570
bogdanm 13:0645d8841f51 571 __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
bogdanm 13:0645d8841f51 572 return(result);
bogdanm 13:0645d8841f51 573 }
bogdanm 13:0645d8841f51 574
bogdanm 13:0645d8841f51 575
bogdanm 13:0645d8841f51 576 /** \brief STR Exclusive (16 bit)
bogdanm 13:0645d8841f51 577
bogdanm 13:0645d8841f51 578 This function performs an exclusive STR command for 16 bit values.
bogdanm 13:0645d8841f51 579
bogdanm 13:0645d8841f51 580 \param [in] value Value to store
bogdanm 13:0645d8841f51 581 \param [in] ptr Pointer to location
bogdanm 13:0645d8841f51 582 \return 0 Function succeeded
bogdanm 13:0645d8841f51 583 \return 1 Function failed
bogdanm 13:0645d8841f51 584 */
bogdanm 13:0645d8841f51 585 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
bogdanm 13:0645d8841f51 586 {
bogdanm 13:0645d8841f51 587 uint32_t result;
bogdanm 13:0645d8841f51 588
bogdanm 13:0645d8841f51 589 __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
bogdanm 13:0645d8841f51 590 return(result);
bogdanm 13:0645d8841f51 591 }
bogdanm 13:0645d8841f51 592
bogdanm 13:0645d8841f51 593
bogdanm 13:0645d8841f51 594 /** \brief STR Exclusive (32 bit)
bogdanm 13:0645d8841f51 595
bogdanm 13:0645d8841f51 596 This function performs an exclusive STR command for 32 bit values.
bogdanm 13:0645d8841f51 597
bogdanm 13:0645d8841f51 598 \param [in] value Value to store
bogdanm 13:0645d8841f51 599 \param [in] ptr Pointer to location
bogdanm 13:0645d8841f51 600 \return 0 Function succeeded
bogdanm 13:0645d8841f51 601 \return 1 Function failed
bogdanm 13:0645d8841f51 602 */
bogdanm 13:0645d8841f51 603 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
bogdanm 13:0645d8841f51 604 {
bogdanm 13:0645d8841f51 605 uint32_t result;
bogdanm 13:0645d8841f51 606
bogdanm 13:0645d8841f51 607 __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
bogdanm 13:0645d8841f51 608 return(result);
bogdanm 13:0645d8841f51 609 }
bogdanm 13:0645d8841f51 610
bogdanm 13:0645d8841f51 611
bogdanm 13:0645d8841f51 612 /** \brief Remove the exclusive lock
bogdanm 13:0645d8841f51 613
bogdanm 13:0645d8841f51 614 This function removes the exclusive lock which is created by LDREX.
bogdanm 13:0645d8841f51 615
bogdanm 13:0645d8841f51 616 */
bogdanm 13:0645d8841f51 617 __attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
bogdanm 13:0645d8841f51 618 {
bogdanm 13:0645d8841f51 619 __ASM volatile ("clrex" ::: "memory");
bogdanm 13:0645d8841f51 620 }
bogdanm 13:0645d8841f51 621
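/* Editor's note: illustrative sketch, not part of the original CMSIS header.
   These exclusive-access intrinsics are only compiled in for __CORTEX_M >= 0x03,
   so they are not available on the Cortex-M0 (e.g. the STM32F030 target this fork
   adds). Where they exist, the canonical lock-free update loop looks like:

       static volatile uint32_t counter;        // hypothetical shared variable

       static void atomic_increment(void)
       {
           uint32_t v;
           do {
               v = __LDREXW(&counter);                      // load and mark exclusive
           } while (__STREXW(v + 1u, &counter) != 0u);      // 0 = store succeeded; retry otherwise
           __DMB();                                         // order against later accesses
       }
*/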
bogdanm 13:0645d8841f51 622
bogdanm 13:0645d8841f51 623 /** \brief Signed Saturate
bogdanm 13:0645d8841f51 624
bogdanm 13:0645d8841f51 625 This function saturates a signed value.
bogdanm 13:0645d8841f51 626
bogdanm 13:0645d8841f51 627 \param [in] value Value to be saturated
bogdanm 13:0645d8841f51 628 \param [in] sat Bit position to saturate to (1..32)
bogdanm 13:0645d8841f51 629 \return Saturated value
bogdanm 13:0645d8841f51 630 */
bogdanm 13:0645d8841f51 631 #define __SSAT(ARG1,ARG2) \
bogdanm 13:0645d8841f51 632 ({ \
bogdanm 13:0645d8841f51 633 uint32_t __RES, __ARG1 = (ARG1); \
bogdanm 13:0645d8841f51 634 __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
bogdanm 13:0645d8841f51 635 __RES; \
bogdanm 13:0645d8841f51 636 })
bogdanm 13:0645d8841f51 637
bogdanm 13:0645d8841f51 638
bogdanm 13:0645d8841f51 639 /** \brief Unsigned Saturate
bogdanm 13:0645d8841f51 640
bogdanm 13:0645d8841f51 641 This function saturates an unsigned value.
bogdanm 13:0645d8841f51 642
bogdanm 13:0645d8841f51 643 \param [in] value Value to be saturated
bogdanm 13:0645d8841f51 644 \param [in] sat Bit position to saturate to (0..31)
bogdanm 13:0645d8841f51 645 \return Saturated value
bogdanm 13:0645d8841f51 646 */
bogdanm 13:0645d8841f51 647 #define __USAT(ARG1,ARG2) \
bogdanm 13:0645d8841f51 648 ({ \
bogdanm 13:0645d8841f51 649 uint32_t __RES, __ARG1 = (ARG1); \
bogdanm 13:0645d8841f51 650 __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
bogdanm 13:0645d8841f51 651 __RES; \
bogdanm 13:0645d8841f51 652 })
bogdanm 13:0645d8841f51 653
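/* Editor's note: illustrative example, not part of the original CMSIS header.
   Like the other intrinsics in this block, __SSAT/__USAT require __CORTEX_M >= 0x03.
   The saturation width must be a compile-time constant (the "I" constraint above):

       int32_t  s = __SSAT(70000, 16);   // s == 32767  (clamped to the signed 16-bit range)
       uint32_t u = __USAT(300, 8);      // u == 255    (clamped to the unsigned 8-bit range)
*/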
bogdanm 13:0645d8841f51 654
bogdanm 13:0645d8841f51 655 /** \brief Count leading zeros
bogdanm 13:0645d8841f51 656
bogdanm 13:0645d8841f51 657 This function counts the number of leading zeros of a data value.
bogdanm 13:0645d8841f51 658
bogdanm 13:0645d8841f51 659 \param [in] value Value to count the leading zeros
bogdanm 13:0645d8841f51 660 \return number of leading zeros in value
bogdanm 13:0645d8841f51 661 */
bogdanm 13:0645d8841f51 662 __attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
bogdanm 13:0645d8841f51 663 {
bogdanm 13:0645d8841f51 664 uint32_t result;
bogdanm 13:0645d8841f51 665
bogdanm 13:0645d8841f51 666 __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
bogdanm 13:0645d8841f51 667 return(result);
bogdanm 13:0645d8841f51 668 }
bogdanm 13:0645d8841f51 669
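/* Editor's note: illustrative example, not part of the original CMSIS header.

       uint32_t zeros = __CLZ(0x00010000u);   // zeros == 15
       uint32_t msb   = 31u - zeros;          // index of the most significant set bit (16)

   __CLZ(0) returns 32, so guard for zero before using the result as a bit index. */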
bogdanm 13:0645d8841f51 670 #endif /* (__CORTEX_M >= 0x03) */
bogdanm 13:0645d8841f51 671
bogdanm 13:0645d8841f51 672
bogdanm 13:0645d8841f51 673
bogdanm 13:0645d8841f51 674
bogdanm 13:0645d8841f51 675 #elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
bogdanm 13:0645d8841f51 676 /* TASKING carm specific functions */
bogdanm 13:0645d8841f51 677
bogdanm 13:0645d8841f51 678 /*
bogdanm 13:0645d8841f51 679 * The CMSIS functions have been implemented as intrinsics in the compiler.
bogdanm 13:0645d8841f51 680 * Please use "carm -?i" to get an up-to-date list of all intrinsics,
bogdanm 13:0645d8841f51 681 * including the CMSIS ones.
bogdanm 13:0645d8841f51 682 */
bogdanm 13:0645d8841f51 683
bogdanm 13:0645d8841f51 684 #endif
bogdanm 13:0645d8841f51 685
bogdanm 13:0645d8841f51 686 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
bogdanm 13:0645d8841f51 687
bogdanm 13:0645d8841f51 688 #endif /* __CORE_CMINSTR_H */