For the PSS course at Ječná

Committer: vladvana
Date:      Sun Sep 24 12:31:52 2017 +0000
Revision:  0:23d1f73bf130
Materials for the PSS exercises

Who changed what in which revision?

User        Revision           Line number    New contents of line
vladvana 0:23d1f73bf130 1 /**************************************************************************//**
vladvana 0:23d1f73bf130 2 * @file core_cmInstr.h
vladvana 0:23d1f73bf130 3 * @brief CMSIS Cortex-M Core Instruction Access Header File
vladvana 0:23d1f73bf130 4 * @version V4.10
vladvana 0:23d1f73bf130 5 * @date 18. March 2015
vladvana 0:23d1f73bf130 6 *
vladvana 0:23d1f73bf130 7 * @note
vladvana 0:23d1f73bf130 8 *
vladvana 0:23d1f73bf130 9 ******************************************************************************/
vladvana 0:23d1f73bf130 10 /* Copyright (c) 2009 - 2014 ARM LIMITED
vladvana 0:23d1f73bf130 11
vladvana 0:23d1f73bf130 12 All rights reserved.
vladvana 0:23d1f73bf130 13 Redistribution and use in source and binary forms, with or without
vladvana 0:23d1f73bf130 14 modification, are permitted provided that the following conditions are met:
vladvana 0:23d1f73bf130 15 - Redistributions of source code must retain the above copyright
vladvana 0:23d1f73bf130 16 notice, this list of conditions and the following disclaimer.
vladvana 0:23d1f73bf130 17 - Redistributions in binary form must reproduce the above copyright
vladvana 0:23d1f73bf130 18 notice, this list of conditions and the following disclaimer in the
vladvana 0:23d1f73bf130 19 documentation and/or other materials provided with the distribution.
vladvana 0:23d1f73bf130 20 - Neither the name of ARM nor the names of its contributors may be used
vladvana 0:23d1f73bf130 21 to endorse or promote products derived from this software without
vladvana 0:23d1f73bf130 22 specific prior written permission.
vladvana 0:23d1f73bf130 23 *
vladvana 0:23d1f73bf130 24 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
vladvana 0:23d1f73bf130 25 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
vladvana 0:23d1f73bf130 26 IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
vladvana 0:23d1f73bf130 27 ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
vladvana 0:23d1f73bf130 28 LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
vladvana 0:23d1f73bf130 29 CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
vladvana 0:23d1f73bf130 30 SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
vladvana 0:23d1f73bf130 31 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
vladvana 0:23d1f73bf130 32 CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
vladvana 0:23d1f73bf130 33 ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
vladvana 0:23d1f73bf130 34 POSSIBILITY OF SUCH DAMAGE.
vladvana 0:23d1f73bf130 35 ---------------------------------------------------------------------------*/
vladvana 0:23d1f73bf130 36
vladvana 0:23d1f73bf130 37
vladvana 0:23d1f73bf130 38 #ifndef __CORE_CMINSTR_H
vladvana 0:23d1f73bf130 39 #define __CORE_CMINSTR_H
vladvana 0:23d1f73bf130 40
vladvana 0:23d1f73bf130 41
vladvana 0:23d1f73bf130 42 /* ########################## Core Instruction Access ######################### */
vladvana 0:23d1f73bf130 43 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
vladvana 0:23d1f73bf130 44 Access to dedicated instructions
vladvana 0:23d1f73bf130 45 @{
vladvana 0:23d1f73bf130 46 */
vladvana 0:23d1f73bf130 47
vladvana 0:23d1f73bf130 48 #if defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
vladvana 0:23d1f73bf130 49 /* ARM armcc specific functions */
vladvana 0:23d1f73bf130 50
vladvana 0:23d1f73bf130 51 #if (__ARMCC_VERSION < 400677)
vladvana 0:23d1f73bf130 52 #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
vladvana 0:23d1f73bf130 53 #endif
vladvana 0:23d1f73bf130 54
vladvana 0:23d1f73bf130 55
vladvana 0:23d1f73bf130 56 /** \brief No Operation
vladvana 0:23d1f73bf130 57
vladvana 0:23d1f73bf130 58 No Operation does nothing. This instruction can be used for code alignment purposes.
vladvana 0:23d1f73bf130 59 */
vladvana 0:23d1f73bf130 60 #define __NOP __nop
vladvana 0:23d1f73bf130 61
vladvana 0:23d1f73bf130 62
vladvana 0:23d1f73bf130 63 /** \brief Wait For Interrupt
vladvana 0:23d1f73bf130 64
vladvana 0:23d1f73bf130 65 Wait For Interrupt is a hint instruction that suspends execution
vladvana 0:23d1f73bf130 66 until one of a number of events occurs.
vladvana 0:23d1f73bf130 67 */
vladvana 0:23d1f73bf130 68 #define __WFI __wfi
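/* A minimal usage sketch: an idle loop that sleeps until the next interrupt.
   The work_pending flag is a hypothetical variable assumed to be set from an
   interrupt handler. */
static volatile uint32_t work_pending = 0U;   /* hypothetical, set by an ISR */

static void idle_until_work(void)
{
    while (work_pending == 0U)
    {
        __WFI();           /* suspend execution until an interrupt occurs */
    }
    work_pending = 0U;     /* acknowledge and return to the caller        */
}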
vladvana 0:23d1f73bf130 69
vladvana 0:23d1f73bf130 70
vladvana 0:23d1f73bf130 71 /** \brief Wait For Event
vladvana 0:23d1f73bf130 72
vladvana 0:23d1f73bf130 73 Wait For Event is a hint instruction that permits the processor to enter
vladvana 0:23d1f73bf130 74 a low-power state until one of a number of events occurs.
vladvana 0:23d1f73bf130 75 */
vladvana 0:23d1f73bf130 76 #define __WFE __wfe
vladvana 0:23d1f73bf130 77
vladvana 0:23d1f73bf130 78
vladvana 0:23d1f73bf130 79 /** \brief Send Event
vladvana 0:23d1f73bf130 80
vladvana 0:23d1f73bf130 81 Send Event is a hint instruction. It causes an event to be signaled to the CPU.
vladvana 0:23d1f73bf130 82 */
vladvana 0:23d1f73bf130 83 #define __SEV __sev
vladvana 0:23d1f73bf130 84
vladvana 0:23d1f73bf130 85
vladvana 0:23d1f73bf130 86 /** \brief Instruction Synchronization Barrier
vladvana 0:23d1f73bf130 87
vladvana 0:23d1f73bf130 88 Instruction Synchronization Barrier flushes the pipeline in the processor,
vladvana 0:23d1f73bf130 89 so that all instructions following the ISB are fetched from cache or
vladvana 0:23d1f73bf130 90 memory, after the instruction has been completed.
vladvana 0:23d1f73bf130 91 */
vladvana 0:23d1f73bf130 92 #define __ISB() do {\
vladvana 0:23d1f73bf130 93 __schedule_barrier();\
vladvana 0:23d1f73bf130 94 __isb(0xF);\
vladvana 0:23d1f73bf130 95 __schedule_barrier();\
vladvana 0:23d1f73bf130 96 } while (0)
vladvana 0:23d1f73bf130 97
vladvana 0:23d1f73bf130 98 /** \brief Data Synchronization Barrier
vladvana 0:23d1f73bf130 99
vladvana 0:23d1f73bf130 100 This function acts as a special kind of Data Memory Barrier.
vladvana 0:23d1f73bf130 101 It completes when all explicit memory accesses before this instruction complete.
vladvana 0:23d1f73bf130 102 */
vladvana 0:23d1f73bf130 103 #define __DSB() do {\
vladvana 0:23d1f73bf130 104 __schedule_barrier();\
vladvana 0:23d1f73bf130 105 __dsb(0xF);\
vladvana 0:23d1f73bf130 106 __schedule_barrier();\
vladvana 0:23d1f73bf130 107 } while (0)
vladvana 0:23d1f73bf130 108
vladvana 0:23d1f73bf130 109 /** \brief Data Memory Barrier
vladvana 0:23d1f73bf130 110
vladvana 0:23d1f73bf130 111 This function ensures the apparent order of the explicit memory operations before
vladvana 0:23d1f73bf130 112 and after the instruction, without ensuring their completion.
vladvana 0:23d1f73bf130 113 */
vladvana 0:23d1f73bf130 114 #define __DMB() do {\
vladvana 0:23d1f73bf130 115 __schedule_barrier();\
vladvana 0:23d1f73bf130 116 __dmb(0xF);\
vladvana 0:23d1f73bf130 117 __schedule_barrier();\
vladvana 0:23d1f73bf130 118 } while (0)
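/* A minimal usage sketch of the barrier pattern: after writing a control
   register, __DSB() ensures the store has completed and __ISB() flushes the
   pipeline before execution continues.  CTRL_REG is a hypothetical
   memory-mapped register address used only for illustration. */
#define CTRL_REG (*(volatile uint32_t *)0x40000000UL)   /* hypothetical address */

static void enable_block(void)
{
    CTRL_REG |= 1UL;   /* set a hypothetical enable bit                */
    __DSB();           /* wait for the store to complete               */
    __ISB();           /* refetch subsequent instructions so the new
                          configuration is in effect                   */
}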
vladvana 0:23d1f73bf130 119
vladvana 0:23d1f73bf130 120 /** \brief Reverse byte order (32 bit)
vladvana 0:23d1f73bf130 121
vladvana 0:23d1f73bf130 122 This function reverses the byte order in an integer value.
vladvana 0:23d1f73bf130 123
vladvana 0:23d1f73bf130 124 \param [in] value Value to reverse
vladvana 0:23d1f73bf130 125 \return Reversed value
vladvana 0:23d1f73bf130 126 */
vladvana 0:23d1f73bf130 127 #define __REV __rev
vladvana 0:23d1f73bf130 128
vladvana 0:23d1f73bf130 129
vladvana 0:23d1f73bf130 130 /** \brief Reverse byte order (16 bit)
vladvana 0:23d1f73bf130 131
vladvana 0:23d1f73bf130 132 This function reverses the byte order in two unsigned short values.
vladvana 0:23d1f73bf130 133
vladvana 0:23d1f73bf130 134 \param [in] value Value to reverse
vladvana 0:23d1f73bf130 135 \return Reversed value
vladvana 0:23d1f73bf130 136 */
vladvana 0:23d1f73bf130 137 #ifndef __NO_EMBEDDED_ASM
vladvana 0:23d1f73bf130 138 __attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
vladvana 0:23d1f73bf130 139 {
vladvana 0:23d1f73bf130 140 rev16 r0, r0
vladvana 0:23d1f73bf130 141 bx lr
vladvana 0:23d1f73bf130 142 }
vladvana 0:23d1f73bf130 143 #endif
vladvana 0:23d1f73bf130 144
vladvana 0:23d1f73bf130 145 /** \brief Reverse byte order in signed short value
vladvana 0:23d1f73bf130 146
vladvana 0:23d1f73bf130 147 This function reverses the byte order in a signed short value with sign extension to integer.
vladvana 0:23d1f73bf130 148
vladvana 0:23d1f73bf130 149 \param [in] value Value to reverse
vladvana 0:23d1f73bf130 150 \return Reversed value
vladvana 0:23d1f73bf130 151 */
vladvana 0:23d1f73bf130 152 #ifndef __NO_EMBEDDED_ASM
vladvana 0:23d1f73bf130 153 __attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
vladvana 0:23d1f73bf130 154 {
vladvana 0:23d1f73bf130 155 revsh r0, r0
vladvana 0:23d1f73bf130 156 bx lr
vladvana 0:23d1f73bf130 157 }
vladvana 0:23d1f73bf130 158 #endif
vladvana 0:23d1f73bf130 159
vladvana 0:23d1f73bf130 160
vladvana 0:23d1f73bf130 161 /** \brief Rotate Right in unsigned value (32 bit)
vladvana 0:23d1f73bf130 162
vladvana 0:23d1f73bf130 163 This function rotates the contents of a register to the right by a specified number of bits.
vladvana 0:23d1f73bf130 164
vladvana 0:23d1f73bf130 165 \param [in] value Value to rotate
vladvana 0:23d1f73bf130 166 \param [in] shift Number of bits to rotate
vladvana 0:23d1f73bf130 167 \return Rotated value
vladvana 0:23d1f73bf130 168 */
vladvana 0:23d1f73bf130 169 #define __ROR __ror
vladvana 0:23d1f73bf130 170
vladvana 0:23d1f73bf130 171
vladvana 0:23d1f73bf130 172 /** \brief Breakpoint
vladvana 0:23d1f73bf130 173
vladvana 0:23d1f73bf130 174 This function causes the processor to enter Debug state.
vladvana 0:23d1f73bf130 175 Debug tools can use this to investigate system state when the instruction at a particular address is reached.
vladvana 0:23d1f73bf130 176
vladvana 0:23d1f73bf130 177 \param [in] value is ignored by the processor.
vladvana 0:23d1f73bf130 178 If required, a debugger can use it to store additional information about the breakpoint.
vladvana 0:23d1f73bf130 179 */
vladvana 0:23d1f73bf130 180 #define __BKPT(value) __breakpoint(value)
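/* A minimal usage sketch: trapping an unrecoverable error in the debugger.
   Note that executing BKPT with no debugger attached typically escalates to
   a fault, so production builds usually guard or replace this call. */
static void fatal_error(void)
{
    __BKPT(0x00);      /* enter Debug state; the immediate is only informational */
    for (;;) { }       /* do not return                                           */
}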
vladvana 0:23d1f73bf130 181
vladvana 0:23d1f73bf130 182
vladvana 0:23d1f73bf130 183 /** \brief Reverse bit order of value
vladvana 0:23d1f73bf130 184
vladvana 0:23d1f73bf130 185 This function reverses the bit order of the given value.
vladvana 0:23d1f73bf130 186
vladvana 0:23d1f73bf130 187 \param [in] value Value to reverse
vladvana 0:23d1f73bf130 188 \return Reversed value
vladvana 0:23d1f73bf130 189 */
vladvana 0:23d1f73bf130 190 #if (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300)
vladvana 0:23d1f73bf130 191 #define __RBIT __rbit
vladvana 0:23d1f73bf130 192 #else
vladvana 0:23d1f73bf130 193 __attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
vladvana 0:23d1f73bf130 194 {
vladvana 0:23d1f73bf130 195 uint32_t result;
vladvana 0:23d1f73bf130 196 int32_t s = 4 /*sizeof(v)*/ * 8 - 1; // extra shift needed at end
vladvana 0:23d1f73bf130 197
vladvana 0:23d1f73bf130 198 result = value; // r will be reversed bits of v; first get LSB of v
vladvana 0:23d1f73bf130 199 for (value >>= 1; value; value >>= 1)
vladvana 0:23d1f73bf130 200 {
vladvana 0:23d1f73bf130 201 result <<= 1;
vladvana 0:23d1f73bf130 202 result |= value & 1;
vladvana 0:23d1f73bf130 203 s--;
vladvana 0:23d1f73bf130 204 }
vladvana 0:23d1f73bf130 205 result <<= s; // shift when v's highest bits are zero
vladvana 0:23d1f73bf130 206 return(result);
vladvana 0:23d1f73bf130 207 }
vladvana 0:23d1f73bf130 208 #endif
vladvana 0:23d1f73bf130 209
vladvana 0:23d1f73bf130 210
vladvana 0:23d1f73bf130 211 /** \brief Count leading zeros
vladvana 0:23d1f73bf130 212
vladvana 0:23d1f73bf130 213 This function counts the number of leading zeros of a data value.
vladvana 0:23d1f73bf130 214
vladvana 0:23d1f73bf130 215 \param [in] value Value to count the leading zeros
vladvana 0:23d1f73bf130 216 \return number of leading zeros in value
vladvana 0:23d1f73bf130 217 */
vladvana 0:23d1f73bf130 218 #define __CLZ __clz
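/* A minimal usage sketch: deriving the index of the highest set bit from the
   leading-zero count.  The zero case is handled explicitly because the
   leading-zero count of 0 is 32. */
static uint32_t highest_set_bit(uint32_t value)
{
    return (value != 0U) ? (31U - (uint32_t)__CLZ(value)) : 0U;
}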
vladvana 0:23d1f73bf130 219
vladvana 0:23d1f73bf130 220
vladvana 0:23d1f73bf130 221 #if (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300)
vladvana 0:23d1f73bf130 222
vladvana 0:23d1f73bf130 223 /** \brief LDR Exclusive (8 bit)
vladvana 0:23d1f73bf130 224
vladvana 0:23d1f73bf130 225 This function executes an exclusive LDR instruction for an 8-bit value.
vladvana 0:23d1f73bf130 226
vladvana 0:23d1f73bf130 227 \param [in] ptr Pointer to data
vladvana 0:23d1f73bf130 228 \return value of type uint8_t at (*ptr)
vladvana 0:23d1f73bf130 229 */
vladvana 0:23d1f73bf130 230 #define __LDREXB(ptr) ((uint8_t ) __ldrex(ptr))
vladvana 0:23d1f73bf130 231
vladvana 0:23d1f73bf130 232
vladvana 0:23d1f73bf130 233 /** \brief LDR Exclusive (16 bit)
vladvana 0:23d1f73bf130 234
vladvana 0:23d1f73bf130 235 This function executes an exclusive LDR instruction for 16-bit values.
vladvana 0:23d1f73bf130 236
vladvana 0:23d1f73bf130 237 \param [in] ptr Pointer to data
vladvana 0:23d1f73bf130 238 \return value of type uint16_t at (*ptr)
vladvana 0:23d1f73bf130 239 */
vladvana 0:23d1f73bf130 240 #define __LDREXH(ptr) ((uint16_t) __ldrex(ptr))
vladvana 0:23d1f73bf130 241
vladvana 0:23d1f73bf130 242
vladvana 0:23d1f73bf130 243 /** \brief LDR Exclusive (32 bit)
vladvana 0:23d1f73bf130 244
vladvana 0:23d1f73bf130 245 This function executes an exclusive LDR instruction for 32-bit values.
vladvana 0:23d1f73bf130 246
vladvana 0:23d1f73bf130 247 \param [in] ptr Pointer to data
vladvana 0:23d1f73bf130 248 \return value of type uint32_t at (*ptr)
vladvana 0:23d1f73bf130 249 */
vladvana 0:23d1f73bf130 250 #define __LDREXW(ptr) ((uint32_t ) __ldrex(ptr))
vladvana 0:23d1f73bf130 251
vladvana 0:23d1f73bf130 252
vladvana 0:23d1f73bf130 253 /** \brief STR Exclusive (8 bit)
vladvana 0:23d1f73bf130 254
vladvana 0:23d1f73bf130 255 This function executes an exclusive STR instruction for 8-bit values.
vladvana 0:23d1f73bf130 256
vladvana 0:23d1f73bf130 257 \param [in] value Value to store
vladvana 0:23d1f73bf130 258 \param [in] ptr Pointer to location
vladvana 0:23d1f73bf130 259 \return 0 Function succeeded
vladvana 0:23d1f73bf130 260 \return 1 Function failed
vladvana 0:23d1f73bf130 261 */
vladvana 0:23d1f73bf130 262 #define __STREXB(value, ptr) __strex(value, ptr)
vladvana 0:23d1f73bf130 263
vladvana 0:23d1f73bf130 264
vladvana 0:23d1f73bf130 265 /** \brief STR Exclusive (16 bit)
vladvana 0:23d1f73bf130 266
vladvana 0:23d1f73bf130 267 This function executes an exclusive STR instruction for 16-bit values.
vladvana 0:23d1f73bf130 268
vladvana 0:23d1f73bf130 269 \param [in] value Value to store
vladvana 0:23d1f73bf130 270 \param [in] ptr Pointer to location
vladvana 0:23d1f73bf130 271 \return 0 Function succeeded
vladvana 0:23d1f73bf130 272 \return 1 Function failed
vladvana 0:23d1f73bf130 273 */
vladvana 0:23d1f73bf130 274 #define __STREXH(value, ptr) __strex(value, ptr)
vladvana 0:23d1f73bf130 275
vladvana 0:23d1f73bf130 276
vladvana 0:23d1f73bf130 277 /** \brief STR Exclusive (32 bit)
vladvana 0:23d1f73bf130 278
vladvana 0:23d1f73bf130 279 This function executes an exclusive STR instruction for 32-bit values.
vladvana 0:23d1f73bf130 280
vladvana 0:23d1f73bf130 281 \param [in] value Value to store
vladvana 0:23d1f73bf130 282 \param [in] ptr Pointer to location
vladvana 0:23d1f73bf130 283 \return 0 Function succeeded
vladvana 0:23d1f73bf130 284 \return 1 Function failed
vladvana 0:23d1f73bf130 285 */
vladvana 0:23d1f73bf130 286 #define __STREXW(value, ptr) __strex(value, ptr)
vladvana 0:23d1f73bf130 287
vladvana 0:23d1f73bf130 288
vladvana 0:23d1f73bf130 289 /** \brief Remove the exclusive lock
vladvana 0:23d1f73bf130 290
vladvana 0:23d1f73bf130 291 This function removes the exclusive lock which is created by LDREX.
vladvana 0:23d1f73bf130 292
vladvana 0:23d1f73bf130 293 */
vladvana 0:23d1f73bf130 294 #define __CLREX __clrex
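/* A minimal usage sketch: a lock-free increment built from the exclusive
   access intrinsics.  The loop retries until the STREX reports success (0). */
static void atomic_increment(volatile uint32_t *counter)
{
    uint32_t value;

    do
    {
        value = __LDREXW(counter) + 1U;          /* load with exclusive monitor set */
    } while (__STREXW(value, counter) != 0U);    /* 0 = stored, 1 = retry           */
}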
vladvana 0:23d1f73bf130 295
vladvana 0:23d1f73bf130 296
vladvana 0:23d1f73bf130 297 /** \brief Signed Saturate
vladvana 0:23d1f73bf130 298
vladvana 0:23d1f73bf130 299 This function saturates a signed value.
vladvana 0:23d1f73bf130 300
vladvana 0:23d1f73bf130 301 \param [in] value Value to be saturated
vladvana 0:23d1f73bf130 302 \param [in] sat Bit position to saturate to (1..32)
vladvana 0:23d1f73bf130 303 \return Saturated value
vladvana 0:23d1f73bf130 304 */
vladvana 0:23d1f73bf130 305 #define __SSAT __ssat
vladvana 0:23d1f73bf130 306
vladvana 0:23d1f73bf130 307
vladvana 0:23d1f73bf130 308 /** \brief Unsigned Saturate
vladvana 0:23d1f73bf130 309
vladvana 0:23d1f73bf130 310 This function saturates an unsigned value.
vladvana 0:23d1f73bf130 311
vladvana 0:23d1f73bf130 312 \param [in] value Value to be saturated
vladvana 0:23d1f73bf130 313 \param [in] sat Bit position to saturate to (0..31)
vladvana 0:23d1f73bf130 314 \return Saturated value
vladvana 0:23d1f73bf130 315 */
vladvana 0:23d1f73bf130 316 #define __USAT __usat
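/* A minimal usage sketch: clipping a 32-bit intermediate result to the
   signed 16-bit (Q15) range with a single saturating instruction. */
static int16_t clip_to_q15(int32_t sample)
{
    return (int16_t)__SSAT(sample, 16);          /* saturate to [-32768, 32767] */
}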
vladvana 0:23d1f73bf130 317
vladvana 0:23d1f73bf130 318
vladvana 0:23d1f73bf130 319 /** \brief Rotate Right with Extend (32 bit)
vladvana 0:23d1f73bf130 320
vladvana 0:23d1f73bf130 321 This function moves each bit of a bitstring right by one bit.
vladvana 0:23d1f73bf130 322 The carry input is shifted in at the left end of the bitstring.
vladvana 0:23d1f73bf130 323
vladvana 0:23d1f73bf130 324 \param [in] value Value to rotate
vladvana 0:23d1f73bf130 325 \return Rotated value
vladvana 0:23d1f73bf130 326 */
vladvana 0:23d1f73bf130 327 #ifndef __NO_EMBEDDED_ASM
vladvana 0:23d1f73bf130 328 __attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint32_t value)
vladvana 0:23d1f73bf130 329 {
vladvana 0:23d1f73bf130 330 rrx r0, r0
vladvana 0:23d1f73bf130 331 bx lr
vladvana 0:23d1f73bf130 332 }
vladvana 0:23d1f73bf130 333 #endif
vladvana 0:23d1f73bf130 334
vladvana 0:23d1f73bf130 335
vladvana 0:23d1f73bf130 336 /** \brief LDRT Unprivileged (8 bit)
vladvana 0:23d1f73bf130 337
vladvana 0:23d1f73bf130 338 This function executes an unprivileged LDRT instruction for an 8-bit value.
vladvana 0:23d1f73bf130 339
vladvana 0:23d1f73bf130 340 \param [in] ptr Pointer to data
vladvana 0:23d1f73bf130 341 \return value of type uint8_t at (*ptr)
vladvana 0:23d1f73bf130 342 */
vladvana 0:23d1f73bf130 343 #define __LDRBT(ptr) ((uint8_t ) __ldrt(ptr))
vladvana 0:23d1f73bf130 344
vladvana 0:23d1f73bf130 345
vladvana 0:23d1f73bf130 346 /** \brief LDRT Unprivileged (16 bit)
vladvana 0:23d1f73bf130 347
vladvana 0:23d1f73bf130 348 This function executes an unprivileged LDRT instruction for 16-bit values.
vladvana 0:23d1f73bf130 349
vladvana 0:23d1f73bf130 350 \param [in] ptr Pointer to data
vladvana 0:23d1f73bf130 351 \return value of type uint16_t at (*ptr)
vladvana 0:23d1f73bf130 352 */
vladvana 0:23d1f73bf130 353 #define __LDRHT(ptr) ((uint16_t) __ldrt(ptr))
vladvana 0:23d1f73bf130 354
vladvana 0:23d1f73bf130 355
vladvana 0:23d1f73bf130 356 /** \brief LDRT Unprivileged (32 bit)
vladvana 0:23d1f73bf130 357
vladvana 0:23d1f73bf130 358 This function executes an unprivileged LDRT instruction for 32-bit values.
vladvana 0:23d1f73bf130 359
vladvana 0:23d1f73bf130 360 \param [in] ptr Pointer to data
vladvana 0:23d1f73bf130 361 \return value of type uint32_t at (*ptr)
vladvana 0:23d1f73bf130 362 */
vladvana 0:23d1f73bf130 363 #define __LDRT(ptr) ((uint32_t ) __ldrt(ptr))
vladvana 0:23d1f73bf130 364
vladvana 0:23d1f73bf130 365
vladvana 0:23d1f73bf130 366 /** \brief STRT Unprivileged (8 bit)
vladvana 0:23d1f73bf130 367
vladvana 0:23d1f73bf130 368 This function executes an unprivileged STRT instruction for 8-bit values.
vladvana 0:23d1f73bf130 369
vladvana 0:23d1f73bf130 370 \param [in] value Value to store
vladvana 0:23d1f73bf130 371 \param [in] ptr Pointer to location
vladvana 0:23d1f73bf130 372 */
vladvana 0:23d1f73bf130 373 #define __STRBT(value, ptr) __strt(value, ptr)
vladvana 0:23d1f73bf130 374
vladvana 0:23d1f73bf130 375
vladvana 0:23d1f73bf130 376 /** \brief STRT Unprivileged (16 bit)
vladvana 0:23d1f73bf130 377
vladvana 0:23d1f73bf130 378 This function executes an unprivileged STRT instruction for 16-bit values.
vladvana 0:23d1f73bf130 379
vladvana 0:23d1f73bf130 380 \param [in] value Value to store
vladvana 0:23d1f73bf130 381 \param [in] ptr Pointer to location
vladvana 0:23d1f73bf130 382 */
vladvana 0:23d1f73bf130 383 #define __STRHT(value, ptr) __strt(value, ptr)
vladvana 0:23d1f73bf130 384
vladvana 0:23d1f73bf130 385
vladvana 0:23d1f73bf130 386 /** \brief STRT Unprivileged (32 bit)
vladvana 0:23d1f73bf130 387
vladvana 0:23d1f73bf130 388 This function executes an unprivileged STRT instruction for 32-bit values.
vladvana 0:23d1f73bf130 389
vladvana 0:23d1f73bf130 390 \param [in] value Value to store
vladvana 0:23d1f73bf130 391 \param [in] ptr Pointer to location
vladvana 0:23d1f73bf130 392 */
vladvana 0:23d1f73bf130 393 #define __STRT(value, ptr) __strt(value, ptr)
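/* A minimal usage sketch: a privileged handler reading one byte from a
   caller-supplied buffer with an unprivileged load, so the access is checked
   against the unprivileged MPU permissions rather than the handler's own. */
static uint8_t read_user_byte(volatile uint8_t *user_ptr)
{
    return __LDRBT(user_ptr);
}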
vladvana 0:23d1f73bf130 394
vladvana 0:23d1f73bf130 395 #endif /* (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300) */
vladvana 0:23d1f73bf130 396
vladvana 0:23d1f73bf130 397
vladvana 0:23d1f73bf130 398 #elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
vladvana 0:23d1f73bf130 399 /* GNU gcc specific functions */
vladvana 0:23d1f73bf130 400
vladvana 0:23d1f73bf130 401 /* Define macros for porting to both thumb1 and thumb2.
vladvana 0:23d1f73bf130 402 * For thumb1, use low registers (r0-r7), specified by constraint "l".
vladvana 0:23d1f73bf130 403 * Otherwise, use general registers, specified by constraint "r". */
vladvana 0:23d1f73bf130 404 #if defined (__thumb__) && !defined (__thumb2__)
vladvana 0:23d1f73bf130 405 #define __CMSIS_GCC_OUT_REG(r) "=l" (r)
vladvana 0:23d1f73bf130 406 #define __CMSIS_GCC_USE_REG(r) "l" (r)
vladvana 0:23d1f73bf130 407 #else
vladvana 0:23d1f73bf130 408 #define __CMSIS_GCC_OUT_REG(r) "=r" (r)
vladvana 0:23d1f73bf130 409 #define __CMSIS_GCC_USE_REG(r) "r" (r)
vladvana 0:23d1f73bf130 410 #endif
vladvana 0:23d1f73bf130 411
vladvana 0:23d1f73bf130 412 /** \brief No Operation
vladvana 0:23d1f73bf130 413
vladvana 0:23d1f73bf130 414 No Operation does nothing. This instruction can be used for code alignment purposes.
vladvana 0:23d1f73bf130 415 */
vladvana 0:23d1f73bf130 416 __attribute__((always_inline)) __STATIC_INLINE void __NOP(void)
vladvana 0:23d1f73bf130 417 {
vladvana 0:23d1f73bf130 418 __ASM volatile ("nop");
vladvana 0:23d1f73bf130 419 }
vladvana 0:23d1f73bf130 420
vladvana 0:23d1f73bf130 421
vladvana 0:23d1f73bf130 422 /** \brief Wait For Interrupt
vladvana 0:23d1f73bf130 423
vladvana 0:23d1f73bf130 424 Wait For Interrupt is a hint instruction that suspends execution
vladvana 0:23d1f73bf130 425 until one of a number of events occurs.
vladvana 0:23d1f73bf130 426 */
vladvana 0:23d1f73bf130 427 __attribute__((always_inline)) __STATIC_INLINE void __WFI(void)
vladvana 0:23d1f73bf130 428 {
vladvana 0:23d1f73bf130 429 __ASM volatile ("wfi");
vladvana 0:23d1f73bf130 430 }
vladvana 0:23d1f73bf130 431
vladvana 0:23d1f73bf130 432
vladvana 0:23d1f73bf130 433 /** \brief Wait For Event
vladvana 0:23d1f73bf130 434
vladvana 0:23d1f73bf130 435 Wait For Event is a hint instruction that permits the processor to enter
vladvana 0:23d1f73bf130 436 a low-power state until one of a number of events occurs.
vladvana 0:23d1f73bf130 437 */
vladvana 0:23d1f73bf130 438 __attribute__((always_inline)) __STATIC_INLINE void __WFE(void)
vladvana 0:23d1f73bf130 439 {
vladvana 0:23d1f73bf130 440 __ASM volatile ("wfe");
vladvana 0:23d1f73bf130 441 }
vladvana 0:23d1f73bf130 442
vladvana 0:23d1f73bf130 443
vladvana 0:23d1f73bf130 444 /** \brief Send Event
vladvana 0:23d1f73bf130 445
vladvana 0:23d1f73bf130 446 Send Event is a hint instruction. It causes an event to be signaled to the CPU.
vladvana 0:23d1f73bf130 447 */
vladvana 0:23d1f73bf130 448 __attribute__((always_inline)) __STATIC_INLINE void __SEV(void)
vladvana 0:23d1f73bf130 449 {
vladvana 0:23d1f73bf130 450 __ASM volatile ("sev");
vladvana 0:23d1f73bf130 451 }
vladvana 0:23d1f73bf130 452
vladvana 0:23d1f73bf130 453
vladvana 0:23d1f73bf130 454 /** \brief Instruction Synchronization Barrier
vladvana 0:23d1f73bf130 455
vladvana 0:23d1f73bf130 456 Instruction Synchronization Barrier flushes the pipeline in the processor,
vladvana 0:23d1f73bf130 457 so that all instructions following the ISB are fetched from cache or
vladvana 0:23d1f73bf130 458 memory, after the instruction has been completed.
vladvana 0:23d1f73bf130 459 */
vladvana 0:23d1f73bf130 460 __attribute__((always_inline)) __STATIC_INLINE void __ISB(void)
vladvana 0:23d1f73bf130 461 {
vladvana 0:23d1f73bf130 462 __ASM volatile ("isb 0xF":::"memory");
vladvana 0:23d1f73bf130 463 }
vladvana 0:23d1f73bf130 464
vladvana 0:23d1f73bf130 465
vladvana 0:23d1f73bf130 466 /** \brief Data Synchronization Barrier
vladvana 0:23d1f73bf130 467
vladvana 0:23d1f73bf130 468 This function acts as a special kind of Data Memory Barrier.
vladvana 0:23d1f73bf130 469 It completes when all explicit memory accesses before this instruction complete.
vladvana 0:23d1f73bf130 470 */
vladvana 0:23d1f73bf130 471 __attribute__((always_inline)) __STATIC_INLINE void __DSB(void)
vladvana 0:23d1f73bf130 472 {
vladvana 0:23d1f73bf130 473 __ASM volatile ("dsb 0xF":::"memory");
vladvana 0:23d1f73bf130 474 }
vladvana 0:23d1f73bf130 475
vladvana 0:23d1f73bf130 476
vladvana 0:23d1f73bf130 477 /** \brief Data Memory Barrier
vladvana 0:23d1f73bf130 478
vladvana 0:23d1f73bf130 479 This function ensures the apparent order of the explicit memory operations before
vladvana 0:23d1f73bf130 480 and after the instruction, without ensuring their completion.
vladvana 0:23d1f73bf130 481 */
vladvana 0:23d1f73bf130 482 __attribute__((always_inline)) __STATIC_INLINE void __DMB(void)
vladvana 0:23d1f73bf130 483 {
vladvana 0:23d1f73bf130 484 __ASM volatile ("dmb 0xF":::"memory");
vladvana 0:23d1f73bf130 485 }
vladvana 0:23d1f73bf130 486
vladvana 0:23d1f73bf130 487
vladvana 0:23d1f73bf130 488 /** \brief Reverse byte order (32 bit)
vladvana 0:23d1f73bf130 489
vladvana 0:23d1f73bf130 490 This function reverses the byte order in an integer value.
vladvana 0:23d1f73bf130 491
vladvana 0:23d1f73bf130 492 \param [in] value Value to reverse
vladvana 0:23d1f73bf130 493 \return Reversed value
vladvana 0:23d1f73bf130 494 */
vladvana 0:23d1f73bf130 495 __attribute__((always_inline)) __STATIC_INLINE uint32_t __REV(uint32_t value)
vladvana 0:23d1f73bf130 496 {
vladvana 0:23d1f73bf130 497 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
vladvana 0:23d1f73bf130 498 return __builtin_bswap32(value);
vladvana 0:23d1f73bf130 499 #else
vladvana 0:23d1f73bf130 500 uint32_t result;
vladvana 0:23d1f73bf130 501
vladvana 0:23d1f73bf130 502 __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
vladvana 0:23d1f73bf130 503 return(result);
vladvana 0:23d1f73bf130 504 #endif
vladvana 0:23d1f73bf130 505 }
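/* A minimal usage sketch: converting a 32-bit word received in big-endian
   (network) byte order to the core's little-endian representation. */
static uint32_t be32_to_cpu(uint32_t big_endian_word)
{
    return __REV(big_endian_word);               /* 0x11223344 -> 0x44332211 */
}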
vladvana 0:23d1f73bf130 506
vladvana 0:23d1f73bf130 507
vladvana 0:23d1f73bf130 508 /** \brief Reverse byte order (16 bit)
vladvana 0:23d1f73bf130 509
vladvana 0:23d1f73bf130 510 This function reverses the byte order in two unsigned short values.
vladvana 0:23d1f73bf130 511
vladvana 0:23d1f73bf130 512 \param [in] value Value to reverse
vladvana 0:23d1f73bf130 513 \return Reversed value
vladvana 0:23d1f73bf130 514 */
vladvana 0:23d1f73bf130 515 __attribute__((always_inline)) __STATIC_INLINE uint32_t __REV16(uint32_t value)
vladvana 0:23d1f73bf130 516 {
vladvana 0:23d1f73bf130 517 uint32_t result;
vladvana 0:23d1f73bf130 518
vladvana 0:23d1f73bf130 519 __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
vladvana 0:23d1f73bf130 520 return(result);
vladvana 0:23d1f73bf130 521 }
vladvana 0:23d1f73bf130 522
vladvana 0:23d1f73bf130 523
vladvana 0:23d1f73bf130 524 /** \brief Reverse byte order in signed short value
vladvana 0:23d1f73bf130 525
vladvana 0:23d1f73bf130 526 This function reverses the byte order in a signed short value with sign extension to integer.
vladvana 0:23d1f73bf130 527
vladvana 0:23d1f73bf130 528 \param [in] value Value to reverse
vladvana 0:23d1f73bf130 529 \return Reversed value
vladvana 0:23d1f73bf130 530 */
vladvana 0:23d1f73bf130 531 __attribute__((always_inline)) __STATIC_INLINE int32_t __REVSH(int32_t value)
vladvana 0:23d1f73bf130 532 {
vladvana 0:23d1f73bf130 533 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
vladvana 0:23d1f73bf130 534 return (short)__builtin_bswap16(value);
vladvana 0:23d1f73bf130 535 #else
vladvana 0:23d1f73bf130 536 uint32_t result;
vladvana 0:23d1f73bf130 537
vladvana 0:23d1f73bf130 538 __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
vladvana 0:23d1f73bf130 539 return(result);
vladvana 0:23d1f73bf130 540 #endif
vladvana 0:23d1f73bf130 541 }
vladvana 0:23d1f73bf130 542
vladvana 0:23d1f73bf130 543
vladvana 0:23d1f73bf130 544 /** \brief Rotate Right in unsigned value (32 bit)
vladvana 0:23d1f73bf130 545
vladvana 0:23d1f73bf130 546 This function rotates the contents of a register to the right by a specified number of bits.
vladvana 0:23d1f73bf130 547
vladvana 0:23d1f73bf130 548 \param [in] op1 Value to rotate
vladvana 0:23d1f73bf130 549 \param [in] op2 Number of bits to rotate
vladvana 0:23d1f73bf130 550 \return Rotated value
vladvana 0:23d1f73bf130 551 */
vladvana 0:23d1f73bf130 552 __attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
vladvana 0:23d1f73bf130 553 {
vladvana 0:23d1f73bf130 554 return (op1 >> op2) | (op1 << (32 - op2));
vladvana 0:23d1f73bf130 555 }
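/* A minimal usage sketch: a right rotation by 8 bits, e.g. to move the low
   byte of a word into the most significant position. */
static uint32_t rotate_low_byte_to_top(uint32_t word)
{
    return __ROR(word, 8U);                      /* 0x00000011 -> 0x11000000 */
}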
vladvana 0:23d1f73bf130 556
vladvana 0:23d1f73bf130 557
vladvana 0:23d1f73bf130 558 /** \brief Breakpoint
vladvana 0:23d1f73bf130 559
vladvana 0:23d1f73bf130 560 This function causes the processor to enter Debug state.
vladvana 0:23d1f73bf130 561 Debug tools can use this to investigate system state when the instruction at a particular address is reached.
vladvana 0:23d1f73bf130 562
vladvana 0:23d1f73bf130 563 \param [in] value is ignored by the processor.
vladvana 0:23d1f73bf130 564 If required, a debugger can use it to store additional information about the breakpoint.
vladvana 0:23d1f73bf130 565 */
vladvana 0:23d1f73bf130 566 #define __BKPT(value) __ASM volatile ("bkpt "#value)
vladvana 0:23d1f73bf130 567
vladvana 0:23d1f73bf130 568
vladvana 0:23d1f73bf130 569 /** \brief Reverse bit order of value
vladvana 0:23d1f73bf130 570
vladvana 0:23d1f73bf130 571 This function reverses the bit order of the given value.
vladvana 0:23d1f73bf130 572
vladvana 0:23d1f73bf130 573 \param [in] value Value to reverse
vladvana 0:23d1f73bf130 574 \return Reversed value
vladvana 0:23d1f73bf130 575 */
vladvana 0:23d1f73bf130 576 __attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
vladvana 0:23d1f73bf130 577 {
vladvana 0:23d1f73bf130 578 uint32_t result;
vladvana 0:23d1f73bf130 579
vladvana 0:23d1f73bf130 580 #if (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300)
vladvana 0:23d1f73bf130 581 __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
vladvana 0:23d1f73bf130 582 #else
vladvana 0:23d1f73bf130 583 int32_t s = 4 /*sizeof(v)*/ * 8 - 1; // extra shift needed at end
vladvana 0:23d1f73bf130 584
vladvana 0:23d1f73bf130 585 result = value; // r will be reversed bits of v; first get LSB of v
vladvana 0:23d1f73bf130 586 for (value >>= 1; value; value >>= 1)
vladvana 0:23d1f73bf130 587 {
vladvana 0:23d1f73bf130 588 result <<= 1;
vladvana 0:23d1f73bf130 589 result |= value & 1;
vladvana 0:23d1f73bf130 590 s--;
vladvana 0:23d1f73bf130 591 }
vladvana 0:23d1f73bf130 592 result <<= s; // shift when v's highest bits are zero
vladvana 0:23d1f73bf130 593 #endif
vladvana 0:23d1f73bf130 594 return(result);
vladvana 0:23d1f73bf130 595 }
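/* A minimal usage sketch: bit-reversing a value, as needed for example when
   a CRC or FFT index expects the opposite bit ordering. */
static uint32_t bit_reverse(uint32_t value)
{
    return __RBIT(value);                        /* 0x00000001 -> 0x80000000 */
}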
vladvana 0:23d1f73bf130 596
vladvana 0:23d1f73bf130 597
vladvana 0:23d1f73bf130 598 /** \brief Count leading zeros
vladvana 0:23d1f73bf130 599
vladvana 0:23d1f73bf130 600 This function counts the number of leading zeros of a data value.
vladvana 0:23d1f73bf130 601
vladvana 0:23d1f73bf130 602 \param [in] value Value to count the leading zeros
vladvana 0:23d1f73bf130 603 \return number of leading zeros in value
vladvana 0:23d1f73bf130 604 */
vladvana 0:23d1f73bf130 605 #define __CLZ __builtin_clz
vladvana 0:23d1f73bf130 606
vladvana 0:23d1f73bf130 607
vladvana 0:23d1f73bf130 608 #if (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300)
vladvana 0:23d1f73bf130 609
vladvana 0:23d1f73bf130 610 /** \brief LDR Exclusive (8 bit)
vladvana 0:23d1f73bf130 611
vladvana 0:23d1f73bf130 612 This function executes an exclusive LDR instruction for an 8-bit value.
vladvana 0:23d1f73bf130 613
vladvana 0:23d1f73bf130 614 \param [in] ptr Pointer to data
vladvana 0:23d1f73bf130 615 \return value of type uint8_t at (*ptr)
vladvana 0:23d1f73bf130 616 */
vladvana 0:23d1f73bf130 617 __attribute__((always_inline)) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
vladvana 0:23d1f73bf130 618 {
vladvana 0:23d1f73bf130 619 uint32_t result;
vladvana 0:23d1f73bf130 620
vladvana 0:23d1f73bf130 621 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
vladvana 0:23d1f73bf130 622 __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
vladvana 0:23d1f73bf130 623 #else
vladvana 0:23d1f73bf130 624 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
vladvana 0:23d1f73bf130 625 accepted by the assembler, so the following, less efficient pattern has to be used.
vladvana 0:23d1f73bf130 626 */
vladvana 0:23d1f73bf130 627 __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
vladvana 0:23d1f73bf130 628 #endif
vladvana 0:23d1f73bf130 629 return ((uint8_t) result); /* Add explicit type cast here */
vladvana 0:23d1f73bf130 630 }
vladvana 0:23d1f73bf130 631
vladvana 0:23d1f73bf130 632
vladvana 0:23d1f73bf130 633 /** \brief LDR Exclusive (16 bit)
vladvana 0:23d1f73bf130 634
vladvana 0:23d1f73bf130 635 This function executes an exclusive LDR instruction for 16-bit values.
vladvana 0:23d1f73bf130 636
vladvana 0:23d1f73bf130 637 \param [in] ptr Pointer to data
vladvana 0:23d1f73bf130 638 \return value of type uint16_t at (*ptr)
vladvana 0:23d1f73bf130 639 */
vladvana 0:23d1f73bf130 640 __attribute__((always_inline)) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
vladvana 0:23d1f73bf130 641 {
vladvana 0:23d1f73bf130 642 uint32_t result;
vladvana 0:23d1f73bf130 643
vladvana 0:23d1f73bf130 644 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
vladvana 0:23d1f73bf130 645 __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
vladvana 0:23d1f73bf130 646 #else
vladvana 0:23d1f73bf130 647 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
vladvana 0:23d1f73bf130 648 accepted by the assembler, so the following, less efficient pattern has to be used.
vladvana 0:23d1f73bf130 649 */
vladvana 0:23d1f73bf130 650 __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
vladvana 0:23d1f73bf130 651 #endif
vladvana 0:23d1f73bf130 652 return ((uint16_t) result); /* Add explicit type cast here */
vladvana 0:23d1f73bf130 653 }
vladvana 0:23d1f73bf130 654
vladvana 0:23d1f73bf130 655
vladvana 0:23d1f73bf130 656 /** \brief LDR Exclusive (32 bit)
vladvana 0:23d1f73bf130 657
vladvana 0:23d1f73bf130 658 This function executes an exclusive LDR instruction for 32-bit values.
vladvana 0:23d1f73bf130 659
vladvana 0:23d1f73bf130 660 \param [in] ptr Pointer to data
vladvana 0:23d1f73bf130 661 \return value of type uint32_t at (*ptr)
vladvana 0:23d1f73bf130 662 */
vladvana 0:23d1f73bf130 663 __attribute__((always_inline)) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
vladvana 0:23d1f73bf130 664 {
vladvana 0:23d1f73bf130 665 uint32_t result;
vladvana 0:23d1f73bf130 666
vladvana 0:23d1f73bf130 667 __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
vladvana 0:23d1f73bf130 668 return(result);
vladvana 0:23d1f73bf130 669 }
vladvana 0:23d1f73bf130 670
vladvana 0:23d1f73bf130 671
vladvana 0:23d1f73bf130 672 /** \brief STR Exclusive (8 bit)
vladvana 0:23d1f73bf130 673
vladvana 0:23d1f73bf130 674 This function executes an exclusive STR instruction for 8-bit values.
vladvana 0:23d1f73bf130 675
vladvana 0:23d1f73bf130 676 \param [in] value Value to store
vladvana 0:23d1f73bf130 677 \param [in] ptr Pointer to location
vladvana 0:23d1f73bf130 678 \return 0 Function succeeded
vladvana 0:23d1f73bf130 679 \return 1 Function failed
vladvana 0:23d1f73bf130 680 */
vladvana 0:23d1f73bf130 681 __attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
vladvana 0:23d1f73bf130 682 {
vladvana 0:23d1f73bf130 683 uint32_t result;
vladvana 0:23d1f73bf130 684
vladvana 0:23d1f73bf130 685 __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
vladvana 0:23d1f73bf130 686 return(result);
vladvana 0:23d1f73bf130 687 }
vladvana 0:23d1f73bf130 688
vladvana 0:23d1f73bf130 689
vladvana 0:23d1f73bf130 690 /** \brief STR Exclusive (16 bit)
vladvana 0:23d1f73bf130 691
vladvana 0:23d1f73bf130 692 This function executes an exclusive STR instruction for 16-bit values.
vladvana 0:23d1f73bf130 693
vladvana 0:23d1f73bf130 694 \param [in] value Value to store
vladvana 0:23d1f73bf130 695 \param [in] ptr Pointer to location
vladvana 0:23d1f73bf130 696 \return 0 Function succeeded
vladvana 0:23d1f73bf130 697 \return 1 Function failed
vladvana 0:23d1f73bf130 698 */
vladvana 0:23d1f73bf130 699 __attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
vladvana 0:23d1f73bf130 700 {
vladvana 0:23d1f73bf130 701 uint32_t result;
vladvana 0:23d1f73bf130 702
vladvana 0:23d1f73bf130 703 __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
vladvana 0:23d1f73bf130 704 return(result);
vladvana 0:23d1f73bf130 705 }
vladvana 0:23d1f73bf130 706
vladvana 0:23d1f73bf130 707
vladvana 0:23d1f73bf130 708 /** \brief STR Exclusive (32 bit)
vladvana 0:23d1f73bf130 709
vladvana 0:23d1f73bf130 710 This function executes an exclusive STR instruction for 32-bit values.
vladvana 0:23d1f73bf130 711
vladvana 0:23d1f73bf130 712 \param [in] value Value to store
vladvana 0:23d1f73bf130 713 \param [in] ptr Pointer to location
vladvana 0:23d1f73bf130 714 \return 0 Function succeeded
vladvana 0:23d1f73bf130 715 \return 1 Function failed
vladvana 0:23d1f73bf130 716 */
vladvana 0:23d1f73bf130 717 __attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
vladvana 0:23d1f73bf130 718 {
vladvana 0:23d1f73bf130 719 uint32_t result;
vladvana 0:23d1f73bf130 720
vladvana 0:23d1f73bf130 721 __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
vladvana 0:23d1f73bf130 722 return(result);
vladvana 0:23d1f73bf130 723 }
vladvana 0:23d1f73bf130 724
vladvana 0:23d1f73bf130 725
vladvana 0:23d1f73bf130 726 /** \brief Remove the exclusive lock
vladvana 0:23d1f73bf130 727
vladvana 0:23d1f73bf130 728 This function removes the exclusive lock which is created by LDREX.
vladvana 0:23d1f73bf130 729
vladvana 0:23d1f73bf130 730 */
vladvana 0:23d1f73bf130 731 __attribute__((always_inline)) __STATIC_INLINE void __CLREX(void)
vladvana 0:23d1f73bf130 732 {
vladvana 0:23d1f73bf130 733 __ASM volatile ("clrex" ::: "memory");
vladvana 0:23d1f73bf130 734 }
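/* A minimal usage sketch: a try-lock built on the exclusive intrinsics.
   __CLREX clears the local exclusive monitor on the abandon path. */
static uint32_t try_lock(volatile uint32_t *lock)
{
    if (__LDREXW(lock) != 0U)
    {
        __CLREX();                               /* lock already held: drop the monitor */
        return 0U;                               /* not acquired                         */
    }
    return (__STREXW(1U, lock) == 0U) ? 1U : 0U; /* 1 = acquired                         */
}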
vladvana 0:23d1f73bf130 735
vladvana 0:23d1f73bf130 736
vladvana 0:23d1f73bf130 737 /** \brief Signed Saturate
vladvana 0:23d1f73bf130 738
vladvana 0:23d1f73bf130 739 This function saturates a signed value.
vladvana 0:23d1f73bf130 740
vladvana 0:23d1f73bf130 741 \param [in] value Value to be saturated
vladvana 0:23d1f73bf130 742 \param [in] sat Bit position to saturate to (1..32)
vladvana 0:23d1f73bf130 743 \return Saturated value
vladvana 0:23d1f73bf130 744 */
vladvana 0:23d1f73bf130 745 #define __SSAT(ARG1,ARG2) \
vladvana 0:23d1f73bf130 746 ({ \
vladvana 0:23d1f73bf130 747 uint32_t __RES, __ARG1 = (ARG1); \
vladvana 0:23d1f73bf130 748 __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
vladvana 0:23d1f73bf130 749 __RES; \
vladvana 0:23d1f73bf130 750 })
vladvana 0:23d1f73bf130 751
vladvana 0:23d1f73bf130 752
vladvana 0:23d1f73bf130 753 /** \brief Unsigned Saturate
vladvana 0:23d1f73bf130 754
vladvana 0:23d1f73bf130 755 This function saturates an unsigned value.
vladvana 0:23d1f73bf130 756
vladvana 0:23d1f73bf130 757 \param [in] value Value to be saturated
vladvana 0:23d1f73bf130 758 \param [in] sat Bit position to saturate to (0..31)
vladvana 0:23d1f73bf130 759 \return Saturated value
vladvana 0:23d1f73bf130 760 */
vladvana 0:23d1f73bf130 761 #define __USAT(ARG1,ARG2) \
vladvana 0:23d1f73bf130 762 ({ \
vladvana 0:23d1f73bf130 763 uint32_t __RES, __ARG1 = (ARG1); \
vladvana 0:23d1f73bf130 764 __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
vladvana 0:23d1f73bf130 765 __RES; \
vladvana 0:23d1f73bf130 766 })
vladvana 0:23d1f73bf130 767
vladvana 0:23d1f73bf130 768
vladvana 0:23d1f73bf130 769 /** \brief Rotate Right with Extend (32 bit)
vladvana 0:23d1f73bf130 770
vladvana 0:23d1f73bf130 771 This function moves each bit of a bitstring right by one bit.
vladvana 0:23d1f73bf130 772 The carry input is shifted in at the left end of the bitstring.
vladvana 0:23d1f73bf130 773
vladvana 0:23d1f73bf130 774 \param [in] value Value to rotate
vladvana 0:23d1f73bf130 775 \return Rotated value
vladvana 0:23d1f73bf130 776 */
vladvana 0:23d1f73bf130 777 __attribute__((always_inline)) __STATIC_INLINE uint32_t __RRX(uint32_t value)
vladvana 0:23d1f73bf130 778 {
vladvana 0:23d1f73bf130 779 uint32_t result;
vladvana 0:23d1f73bf130 780
vladvana 0:23d1f73bf130 781 __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
vladvana 0:23d1f73bf130 782 return(result);
vladvana 0:23d1f73bf130 783 }
vladvana 0:23d1f73bf130 784
vladvana 0:23d1f73bf130 785
vladvana 0:23d1f73bf130 786 /** \brief LDRT Unprivileged (8 bit)
vladvana 0:23d1f73bf130 787
vladvana 0:23d1f73bf130 788 This function executes an unprivileged LDRT instruction for an 8-bit value.
vladvana 0:23d1f73bf130 789
vladvana 0:23d1f73bf130 790 \param [in] ptr Pointer to data
vladvana 0:23d1f73bf130 791 \return value of type uint8_t at (*ptr)
vladvana 0:23d1f73bf130 792 */
vladvana 0:23d1f73bf130 793 __attribute__((always_inline)) __STATIC_INLINE uint8_t __LDRBT(volatile uint8_t *addr)
vladvana 0:23d1f73bf130 794 {
vladvana 0:23d1f73bf130 795 uint32_t result;
vladvana 0:23d1f73bf130 796
vladvana 0:23d1f73bf130 797 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
vladvana 0:23d1f73bf130 798 __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*addr) );
vladvana 0:23d1f73bf130 799 #else
vladvana 0:23d1f73bf130 800 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
vladvana 0:23d1f73bf130 801 accepted by the assembler, so the following, less efficient pattern has to be used.
vladvana 0:23d1f73bf130 802 */
vladvana 0:23d1f73bf130 803 __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
vladvana 0:23d1f73bf130 804 #endif
vladvana 0:23d1f73bf130 805 return ((uint8_t) result); /* Add explicit type cast here */
vladvana 0:23d1f73bf130 806 }
vladvana 0:23d1f73bf130 807
vladvana 0:23d1f73bf130 808
vladvana 0:23d1f73bf130 809 /** \brief LDRT Unprivileged (16 bit)
vladvana 0:23d1f73bf130 810
vladvana 0:23d1f73bf130 811 This function executes an unprivileged LDRT instruction for 16-bit values.
vladvana 0:23d1f73bf130 812
vladvana 0:23d1f73bf130 813 \param [in] ptr Pointer to data
vladvana 0:23d1f73bf130 814 \return value of type uint16_t at (*ptr)
vladvana 0:23d1f73bf130 815 */
vladvana 0:23d1f73bf130 816 __attribute__((always_inline)) __STATIC_INLINE uint16_t __LDRHT(volatile uint16_t *addr)
vladvana 0:23d1f73bf130 817 {
vladvana 0:23d1f73bf130 818 uint32_t result;
vladvana 0:23d1f73bf130 819
vladvana 0:23d1f73bf130 820 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
vladvana 0:23d1f73bf130 821 __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*addr) );
vladvana 0:23d1f73bf130 822 #else
vladvana 0:23d1f73bf130 823 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
vladvana 0:23d1f73bf130 824 accepted by the assembler, so the following, less efficient pattern has to be used.
vladvana 0:23d1f73bf130 825 */
vladvana 0:23d1f73bf130 826 __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
vladvana 0:23d1f73bf130 827 #endif
vladvana 0:23d1f73bf130 828 return ((uint16_t) result); /* Add explicit type cast here */
vladvana 0:23d1f73bf130 829 }
vladvana 0:23d1f73bf130 830
vladvana 0:23d1f73bf130 831
vladvana 0:23d1f73bf130 832 /** \brief LDRT Unprivileged (32 bit)
vladvana 0:23d1f73bf130 833
vladvana 0:23d1f73bf130 834 This function executes an unprivileged LDRT instruction for 32-bit values.
vladvana 0:23d1f73bf130 835
vladvana 0:23d1f73bf130 836 \param [in] ptr Pointer to data
vladvana 0:23d1f73bf130 837 \return value of type uint32_t at (*ptr)
vladvana 0:23d1f73bf130 838 */
vladvana 0:23d1f73bf130 839 __attribute__((always_inline)) __STATIC_INLINE uint32_t __LDRT(volatile uint32_t *addr)
vladvana 0:23d1f73bf130 840 {
vladvana 0:23d1f73bf130 841 uint32_t result;
vladvana 0:23d1f73bf130 842
vladvana 0:23d1f73bf130 843 __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*addr) );
vladvana 0:23d1f73bf130 844 return(result);
vladvana 0:23d1f73bf130 845 }
vladvana 0:23d1f73bf130 846
vladvana 0:23d1f73bf130 847
vladvana 0:23d1f73bf130 848 /** \brief STRT Unprivileged (8 bit)
vladvana 0:23d1f73bf130 849
vladvana 0:23d1f73bf130 850 This function executes an unprivileged STRT instruction for 8-bit values.
vladvana 0:23d1f73bf130 851
vladvana 0:23d1f73bf130 852 \param [in] value Value to store
vladvana 0:23d1f73bf130 853 \param [in] ptr Pointer to location
vladvana 0:23d1f73bf130 854 */
vladvana 0:23d1f73bf130 855 __attribute__((always_inline)) __STATIC_INLINE void __STRBT(uint8_t value, volatile uint8_t *addr)
vladvana 0:23d1f73bf130 856 {
vladvana 0:23d1f73bf130 857 __ASM volatile ("strbt %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );
vladvana 0:23d1f73bf130 858 }
vladvana 0:23d1f73bf130 859
vladvana 0:23d1f73bf130 860
vladvana 0:23d1f73bf130 861 /** \brief STRT Unprivileged (16 bit)
vladvana 0:23d1f73bf130 862
vladvana 0:23d1f73bf130 863 This function executes an unprivileged STRT instruction for 16-bit values.
vladvana 0:23d1f73bf130 864
vladvana 0:23d1f73bf130 865 \param [in] value Value to store
vladvana 0:23d1f73bf130 866 \param [in] ptr Pointer to location
vladvana 0:23d1f73bf130 867 */
vladvana 0:23d1f73bf130 868 __attribute__((always_inline)) __STATIC_INLINE void __STRHT(uint16_t value, volatile uint16_t *addr)
vladvana 0:23d1f73bf130 869 {
vladvana 0:23d1f73bf130 870 __ASM volatile ("strht %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );
vladvana 0:23d1f73bf130 871 }
vladvana 0:23d1f73bf130 872
vladvana 0:23d1f73bf130 873
vladvana 0:23d1f73bf130 874 /** \brief STRT Unprivileged (32 bit)
vladvana 0:23d1f73bf130 875
vladvana 0:23d1f73bf130 876 This function executes an unprivileged STRT instruction for 32-bit values.
vladvana 0:23d1f73bf130 877
vladvana 0:23d1f73bf130 878 \param [in] value Value to store
vladvana 0:23d1f73bf130 879 \param [in] ptr Pointer to location
vladvana 0:23d1f73bf130 880 */
vladvana 0:23d1f73bf130 881 __attribute__((always_inline)) __STATIC_INLINE void __STRT(uint32_t value, volatile uint32_t *addr)
vladvana 0:23d1f73bf130 882 {
vladvana 0:23d1f73bf130 883 __ASM volatile ("strt %1, %0" : "=Q" (*addr) : "r" (value) );
vladvana 0:23d1f73bf130 884 }
vladvana 0:23d1f73bf130 885
vladvana 0:23d1f73bf130 886 #endif /* (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300) */
vladvana 0:23d1f73bf130 887
vladvana 0:23d1f73bf130 888
vladvana 0:23d1f73bf130 889 #elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
vladvana 0:23d1f73bf130 890 /* IAR iccarm specific functions */
vladvana 0:23d1f73bf130 891 #include <cmsis_iar.h>
vladvana 0:23d1f73bf130 892
vladvana 0:23d1f73bf130 893
vladvana 0:23d1f73bf130 894 #elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
vladvana 0:23d1f73bf130 895 /* TI CCS specific functions */
vladvana 0:23d1f73bf130 896 #include <cmsis_ccs.h>
vladvana 0:23d1f73bf130 897
vladvana 0:23d1f73bf130 898
vladvana 0:23d1f73bf130 899 #elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
vladvana 0:23d1f73bf130 900 /* TASKING carm specific functions */
vladvana 0:23d1f73bf130 901 /*
vladvana 0:23d1f73bf130 902 * The CMSIS functions have been implemented as intrinsics in the compiler.
vladvana 0:23d1f73bf130 903 * Please use "carm -?i" to get an up-to-date list of all intrinsics,
vladvana 0:23d1f73bf130 904 * including the CMSIS ones.
vladvana 0:23d1f73bf130 905 */
vladvana 0:23d1f73bf130 906
vladvana 0:23d1f73bf130 907
vladvana 0:23d1f73bf130 908 #elif defined ( __CSMC__ ) /*------------------ COSMIC Compiler -------------------*/
vladvana 0:23d1f73bf130 909 /* Cosmic specific functions */
vladvana 0:23d1f73bf130 910 #include <cmsis_csm.h>
vladvana 0:23d1f73bf130 911
vladvana 0:23d1f73bf130 912 #endif
vladvana 0:23d1f73bf130 913
vladvana 0:23d1f73bf130 914 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
vladvana 0:23d1f73bf130 915
vladvana 0:23d1f73bf130 916 #endif /* __CORE_CMINSTR_H */