Integrating the u-blox LISA-C200 modem

Fork of SprintUSBModemHTTPClientTest by Donatien Garnier

Committer: sam_grove
Date: Thu Sep 26 00:44:20 2013 -0500
Revision: 5:3f93dd1d4cb3
Exported the program and replaced the contents of the repo with the source
to build and debug using Keil MDK. Libs NOT up to date are lwip, lwip-sys
and socket. These have newer versions under mbed_official, but we started
from a known working point.

Who changed what in which revision?

User | Revision | Line number | New contents of line
sam_grove 5:3f93dd1d4cb3 1 /**************************************************************************//**
sam_grove 5:3f93dd1d4cb3 2 * @file core_cmInstr.h
sam_grove 5:3f93dd1d4cb3 3 * @brief CMSIS Cortex-M Core Instruction Access Header File
sam_grove 5:3f93dd1d4cb3 4 * @version V3.20
sam_grove 5:3f93dd1d4cb3 5 * @date 05. March 2013
sam_grove 5:3f93dd1d4cb3 6 *
sam_grove 5:3f93dd1d4cb3 7 * @note
sam_grove 5:3f93dd1d4cb3 8 *
sam_grove 5:3f93dd1d4cb3 9 ******************************************************************************/
sam_grove 5:3f93dd1d4cb3 10 /* Copyright (c) 2009 - 2013 ARM LIMITED
sam_grove 5:3f93dd1d4cb3 11
sam_grove 5:3f93dd1d4cb3 12 All rights reserved.
sam_grove 5:3f93dd1d4cb3 13 Redistribution and use in source and binary forms, with or without
sam_grove 5:3f93dd1d4cb3 14 modification, are permitted provided that the following conditions are met:
sam_grove 5:3f93dd1d4cb3 15 - Redistributions of source code must retain the above copyright
sam_grove 5:3f93dd1d4cb3 16 notice, this list of conditions and the following disclaimer.
sam_grove 5:3f93dd1d4cb3 17 - Redistributions in binary form must reproduce the above copyright
sam_grove 5:3f93dd1d4cb3 18 notice, this list of conditions and the following disclaimer in the
sam_grove 5:3f93dd1d4cb3 19 documentation and/or other materials provided with the distribution.
sam_grove 5:3f93dd1d4cb3 20 - Neither the name of ARM nor the names of its contributors may be used
sam_grove 5:3f93dd1d4cb3 21 to endorse or promote products derived from this software without
sam_grove 5:3f93dd1d4cb3 22 specific prior written permission.
sam_grove 5:3f93dd1d4cb3 23 *
sam_grove 5:3f93dd1d4cb3 24 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
sam_grove 5:3f93dd1d4cb3 25 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
sam_grove 5:3f93dd1d4cb3 26 IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
sam_grove 5:3f93dd1d4cb3 27 ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
sam_grove 5:3f93dd1d4cb3 28 LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
sam_grove 5:3f93dd1d4cb3 29 CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
sam_grove 5:3f93dd1d4cb3 30 SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
sam_grove 5:3f93dd1d4cb3 31 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
sam_grove 5:3f93dd1d4cb3 32 CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
sam_grove 5:3f93dd1d4cb3 33 ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
sam_grove 5:3f93dd1d4cb3 34 POSSIBILITY OF SUCH DAMAGE.
sam_grove 5:3f93dd1d4cb3 35 ---------------------------------------------------------------------------*/
sam_grove 5:3f93dd1d4cb3 36
sam_grove 5:3f93dd1d4cb3 37
sam_grove 5:3f93dd1d4cb3 38 #ifndef __CORE_CMINSTR_H
sam_grove 5:3f93dd1d4cb3 39 #define __CORE_CMINSTR_H
sam_grove 5:3f93dd1d4cb3 40
sam_grove 5:3f93dd1d4cb3 41
sam_grove 5:3f93dd1d4cb3 42 /* ########################## Core Instruction Access ######################### */
sam_grove 5:3f93dd1d4cb3 43 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
sam_grove 5:3f93dd1d4cb3 44 Access to dedicated instructions
sam_grove 5:3f93dd1d4cb3 45 @{
sam_grove 5:3f93dd1d4cb3 46 */
sam_grove 5:3f93dd1d4cb3 47
sam_grove 5:3f93dd1d4cb3 48 #if defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
sam_grove 5:3f93dd1d4cb3 49 /* ARM armcc specific functions */
sam_grove 5:3f93dd1d4cb3 50
sam_grove 5:3f93dd1d4cb3 51 #if (__ARMCC_VERSION < 400677)
sam_grove 5:3f93dd1d4cb3 52 #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
sam_grove 5:3f93dd1d4cb3 53 #endif
sam_grove 5:3f93dd1d4cb3 54
sam_grove 5:3f93dd1d4cb3 55
sam_grove 5:3f93dd1d4cb3 56 /** \brief No Operation
sam_grove 5:3f93dd1d4cb3 57
sam_grove 5:3f93dd1d4cb3 58 No Operation does nothing. This instruction can be used for code alignment purposes.
sam_grove 5:3f93dd1d4cb3 59 */
sam_grove 5:3f93dd1d4cb3 60 #define __NOP __nop
sam_grove 5:3f93dd1d4cb3 61
sam_grove 5:3f93dd1d4cb3 62
sam_grove 5:3f93dd1d4cb3 63 /** \brief Wait For Interrupt
sam_grove 5:3f93dd1d4cb3 64
sam_grove 5:3f93dd1d4cb3 65 Wait For Interrupt is a hint instruction that suspends execution
sam_grove 5:3f93dd1d4cb3 66 until one of a number of events occurs.
sam_grove 5:3f93dd1d4cb3 67 */
sam_grove 5:3f93dd1d4cb3 68 #define __WFI __wfi
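As a usage illustration (an editorial sketch, not part of the original header): __WFI is typically placed in an idle loop so the core sleeps until the next interrupt. __disable_irq()/__enable_irq() come from the companion core_cmFunc.h; work_pending and process_work() are hypothetical names.

/* Hypothetical idle loop: sleep until an interrupt flags work, then handle it. */
extern volatile uint32_t work_pending;   /* set to 1 from an interrupt handler */
extern void process_work(void);          /* hypothetical worker                */

static void idle_loop(void)
{
    for (;;) {
        __disable_irq();                 /* close the race between test and sleep      */
        if (!work_pending) {
            __WFI();                     /* sleeps; a pending interrupt still wakes it */
        }
        __enable_irq();                  /* the pended ISR runs here and sets the flag */
        if (work_pending) {
            work_pending = 0;
            process_work();
        }
    }
}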
sam_grove 5:3f93dd1d4cb3 69
sam_grove 5:3f93dd1d4cb3 70
sam_grove 5:3f93dd1d4cb3 71 /** \brief Wait For Event
sam_grove 5:3f93dd1d4cb3 72
sam_grove 5:3f93dd1d4cb3 73 Wait For Event is a hint instruction that permits the processor to enter
sam_grove 5:3f93dd1d4cb3 74 a low-power state until one of a number of events occurs.
sam_grove 5:3f93dd1d4cb3 75 */
sam_grove 5:3f93dd1d4cb3 76 #define __WFE __wfe
sam_grove 5:3f93dd1d4cb3 77
sam_grove 5:3f93dd1d4cb3 78
sam_grove 5:3f93dd1d4cb3 79 /** \brief Send Event
sam_grove 5:3f93dd1d4cb3 80
sam_grove 5:3f93dd1d4cb3 81 Send Event is a hint instruction. It causes an event to be signaled to the CPU.
sam_grove 5:3f93dd1d4cb3 82 */
sam_grove 5:3f93dd1d4cb3 83 #define __SEV __sev
sam_grove 5:3f93dd1d4cb3 84
sam_grove 5:3f93dd1d4cb3 85
sam_grove 5:3f93dd1d4cb3 86 /** \brief Instruction Synchronization Barrier
sam_grove 5:3f93dd1d4cb3 87
sam_grove 5:3f93dd1d4cb3 88 Instruction Synchronization Barrier flushes the pipeline in the processor,
sam_grove 5:3f93dd1d4cb3 89 so that all instructions following the ISB are fetched from cache or
sam_grove 5:3f93dd1d4cb3 90 memory, after the instruction has been completed.
sam_grove 5:3f93dd1d4cb3 91 */
sam_grove 5:3f93dd1d4cb3 92 #define __ISB() __isb(0xF)
sam_grove 5:3f93dd1d4cb3 93
sam_grove 5:3f93dd1d4cb3 94
sam_grove 5:3f93dd1d4cb3 95 /** \brief Data Synchronization Barrier
sam_grove 5:3f93dd1d4cb3 96
sam_grove 5:3f93dd1d4cb3 97 This function acts as a special kind of Data Memory Barrier.
sam_grove 5:3f93dd1d4cb3 98 It completes when all explicit memory accesses before this instruction complete.
sam_grove 5:3f93dd1d4cb3 99 */
sam_grove 5:3f93dd1d4cb3 100 #define __DSB() __dsb(0xF)
sam_grove 5:3f93dd1d4cb3 101
sam_grove 5:3f93dd1d4cb3 102
sam_grove 5:3f93dd1d4cb3 103 /** \brief Data Memory Barrier
sam_grove 5:3f93dd1d4cb3 104
sam_grove 5:3f93dd1d4cb3 105 This function ensures the apparent order of the explicit memory operations before
sam_grove 5:3f93dd1d4cb3 106 and after the instruction, without ensuring their completion.
sam_grove 5:3f93dd1d4cb3 107 */
sam_grove 5:3f93dd1d4cb3 108 #define __DMB() __dmb(0xF)
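A brief, hedged sketch of where the data memory barrier matters: a producer fills a shared buffer and then raises a ready flag, and __DMB() keeps the payload writes ordered before the flag write as observed by another bus master or interrupt handler. shared_buf, buf_ready and publish() are illustrative names.

/* Illustrative producer side of a flag-guarded hand-off. */
static volatile uint32_t shared_buf[2];
static volatile uint32_t buf_ready = 0;

static void publish(uint32_t a, uint32_t b)
{
    shared_buf[0] = a;       /* write the payload first                        */
    shared_buf[1] = b;
    __DMB();                 /* order the payload writes before the flag write */
    buf_ready = 1;           /* a consumer polling buf_ready may now read it   */
}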
sam_grove 5:3f93dd1d4cb3 109
sam_grove 5:3f93dd1d4cb3 110
sam_grove 5:3f93dd1d4cb3 111 /** \brief Reverse byte order (32 bit)
sam_grove 5:3f93dd1d4cb3 112
sam_grove 5:3f93dd1d4cb3 113 This function reverses the byte order in an integer value.
sam_grove 5:3f93dd1d4cb3 114
sam_grove 5:3f93dd1d4cb3 115 \param [in] value Value to reverse
sam_grove 5:3f93dd1d4cb3 116 \return Reversed value
sam_grove 5:3f93dd1d4cb3 117 */
sam_grove 5:3f93dd1d4cb3 118 #define __REV __rev
sam_grove 5:3f93dd1d4cb3 119
sam_grove 5:3f93dd1d4cb3 120
sam_grove 5:3f93dd1d4cb3 121 /** \brief Reverse byte order (16 bit)
sam_grove 5:3f93dd1d4cb3 122
sam_grove 5:3f93dd1d4cb3 123 This function reverses the byte order in two unsigned short values.
sam_grove 5:3f93dd1d4cb3 124
sam_grove 5:3f93dd1d4cb3 125 \param [in] value Value to reverse
sam_grove 5:3f93dd1d4cb3 126 \return Reversed value
sam_grove 5:3f93dd1d4cb3 127 */
sam_grove 5:3f93dd1d4cb3 128 #ifndef __NO_EMBEDDED_ASM
sam_grove 5:3f93dd1d4cb3 129 __attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
sam_grove 5:3f93dd1d4cb3 130 {
sam_grove 5:3f93dd1d4cb3 131 rev16 r0, r0
sam_grove 5:3f93dd1d4cb3 132 bx lr
sam_grove 5:3f93dd1d4cb3 133 }
sam_grove 5:3f93dd1d4cb3 134 #endif
sam_grove 5:3f93dd1d4cb3 135
sam_grove 5:3f93dd1d4cb3 136 /** \brief Reverse byte order in signed short value
sam_grove 5:3f93dd1d4cb3 137
sam_grove 5:3f93dd1d4cb3 138 This function reverses the byte order in a signed short value with sign extension to integer.
sam_grove 5:3f93dd1d4cb3 139
sam_grove 5:3f93dd1d4cb3 140 \param [in] value Value to reverse
sam_grove 5:3f93dd1d4cb3 141 \return Reversed value
sam_grove 5:3f93dd1d4cb3 142 */
sam_grove 5:3f93dd1d4cb3 143 #ifndef __NO_EMBEDDED_ASM
sam_grove 5:3f93dd1d4cb3 144 __attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
sam_grove 5:3f93dd1d4cb3 145 {
sam_grove 5:3f93dd1d4cb3 146 revsh r0, r0
sam_grove 5:3f93dd1d4cb3 147 bx lr
sam_grove 5:3f93dd1d4cb3 148 }
sam_grove 5:3f93dd1d4cb3 149 #endif
sam_grove 5:3f93dd1d4cb3 150
sam_grove 5:3f93dd1d4cb3 151
sam_grove 5:3f93dd1d4cb3 152 /** \brief Rotate Right in unsigned value (32 bit)
sam_grove 5:3f93dd1d4cb3 153
sam_grove 5:3f93dd1d4cb3 154 This function rotates a 32-bit value right by a specified number of bits.
sam_grove 5:3f93dd1d4cb3 155
sam_grove 5:3f93dd1d4cb3 156 \param [in] value Value to rotate
sam_grove 5:3f93dd1d4cb3 157 \param [in] shift Number of bits to rotate
sam_grove 5:3f93dd1d4cb3 158 \return Rotated value
sam_grove 5:3f93dd1d4cb3 159 */
sam_grove 5:3f93dd1d4cb3 160 #define __ROR __ror
sam_grove 5:3f93dd1d4cb3 161
sam_grove 5:3f93dd1d4cb3 162
sam_grove 5:3f93dd1d4cb3 163 /** \brief Breakpoint
sam_grove 5:3f93dd1d4cb3 164
sam_grove 5:3f93dd1d4cb3 165 This function causes the processor to enter Debug state.
sam_grove 5:3f93dd1d4cb3 166 Debug tools can use this to investigate system state when the instruction at a particular address is reached.
sam_grove 5:3f93dd1d4cb3 167
sam_grove 5:3f93dd1d4cb3 168 \param [in] value is ignored by the processor.
sam_grove 5:3f93dd1d4cb3 169 If required, a debugger can use it to store additional information about the breakpoint.
sam_grove 5:3f93dd1d4cb3 170 */
sam_grove 5:3f93dd1d4cb3 171 #define __BKPT(value) __breakpoint(value)
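A small illustration (assumed, not taken from the source) of how __BKPT is often wrapped in a debug assertion so the debugger halts at the failing check; the macro name below is hypothetical.

/* Hypothetical debug assertion: halt in the debugger when the condition fails. */
#define DEBUG_ASSERT(expr)   do { if (!(expr)) { __BKPT(0); } } while (0)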
sam_grove 5:3f93dd1d4cb3 172
sam_grove 5:3f93dd1d4cb3 173
sam_grove 5:3f93dd1d4cb3 174 #if (__CORTEX_M >= 0x03)
sam_grove 5:3f93dd1d4cb3 175
sam_grove 5:3f93dd1d4cb3 176 /** \brief Reverse bit order of value
sam_grove 5:3f93dd1d4cb3 177
sam_grove 5:3f93dd1d4cb3 178 This function reverses the bit order of the given value.
sam_grove 5:3f93dd1d4cb3 179
sam_grove 5:3f93dd1d4cb3 180 \param [in] value Value to reverse
sam_grove 5:3f93dd1d4cb3 181 \return Reversed value
sam_grove 5:3f93dd1d4cb3 182 */
sam_grove 5:3f93dd1d4cb3 183 #define __RBIT __rbit
sam_grove 5:3f93dd1d4cb3 184
sam_grove 5:3f93dd1d4cb3 185
sam_grove 5:3f93dd1d4cb3 186 /** \brief LDR Exclusive (8 bit)
sam_grove 5:3f93dd1d4cb3 187
sam_grove 5:3f93dd1d4cb3 188 This function performs an exclusive LDR command for an 8 bit value.
sam_grove 5:3f93dd1d4cb3 189
sam_grove 5:3f93dd1d4cb3 190 \param [in] ptr Pointer to data
sam_grove 5:3f93dd1d4cb3 191 \return value of type uint8_t at (*ptr)
sam_grove 5:3f93dd1d4cb3 192 */
sam_grove 5:3f93dd1d4cb3 193 #define __LDREXB(ptr) ((uint8_t ) __ldrex(ptr))
sam_grove 5:3f93dd1d4cb3 194
sam_grove 5:3f93dd1d4cb3 195
sam_grove 5:3f93dd1d4cb3 196 /** \brief LDR Exclusive (16 bit)
sam_grove 5:3f93dd1d4cb3 197
sam_grove 5:3f93dd1d4cb3 198 This function performs an exclusive LDR command for 16 bit values.
sam_grove 5:3f93dd1d4cb3 199
sam_grove 5:3f93dd1d4cb3 200 \param [in] ptr Pointer to data
sam_grove 5:3f93dd1d4cb3 201 \return value of type uint16_t at (*ptr)
sam_grove 5:3f93dd1d4cb3 202 */
sam_grove 5:3f93dd1d4cb3 203 #define __LDREXH(ptr) ((uint16_t) __ldrex(ptr))
sam_grove 5:3f93dd1d4cb3 204
sam_grove 5:3f93dd1d4cb3 205
sam_grove 5:3f93dd1d4cb3 206 /** \brief LDR Exclusive (32 bit)
sam_grove 5:3f93dd1d4cb3 207
sam_grove 5:3f93dd1d4cb3 208 This function performs an exclusive LDR command for 32 bit values.
sam_grove 5:3f93dd1d4cb3 209
sam_grove 5:3f93dd1d4cb3 210 \param [in] ptr Pointer to data
sam_grove 5:3f93dd1d4cb3 211 \return value of type uint32_t at (*ptr)
sam_grove 5:3f93dd1d4cb3 212 */
sam_grove 5:3f93dd1d4cb3 213 #define __LDREXW(ptr) ((uint32_t ) __ldrex(ptr))
sam_grove 5:3f93dd1d4cb3 214
sam_grove 5:3f93dd1d4cb3 215
sam_grove 5:3f93dd1d4cb3 216 /** \brief STR Exclusive (8 bit)
sam_grove 5:3f93dd1d4cb3 217
sam_grove 5:3f93dd1d4cb3 218 This function performs an exclusive STR command for 8 bit values.
sam_grove 5:3f93dd1d4cb3 219
sam_grove 5:3f93dd1d4cb3 220 \param [in] value Value to store
sam_grove 5:3f93dd1d4cb3 221 \param [in] ptr Pointer to location
sam_grove 5:3f93dd1d4cb3 222 \return 0 Function succeeded
sam_grove 5:3f93dd1d4cb3 223 \return 1 Function failed
sam_grove 5:3f93dd1d4cb3 224 */
sam_grove 5:3f93dd1d4cb3 225 #define __STREXB(value, ptr) __strex(value, ptr)
sam_grove 5:3f93dd1d4cb3 226
sam_grove 5:3f93dd1d4cb3 227
sam_grove 5:3f93dd1d4cb3 228 /** \brief STR Exclusive (16 bit)
sam_grove 5:3f93dd1d4cb3 229
sam_grove 5:3f93dd1d4cb3 230 This function performs an exclusive STR command for 16 bit values.
sam_grove 5:3f93dd1d4cb3 231
sam_grove 5:3f93dd1d4cb3 232 \param [in] value Value to store
sam_grove 5:3f93dd1d4cb3 233 \param [in] ptr Pointer to location
sam_grove 5:3f93dd1d4cb3 234 \return 0 Function succeeded
sam_grove 5:3f93dd1d4cb3 235 \return 1 Function failed
sam_grove 5:3f93dd1d4cb3 236 */
sam_grove 5:3f93dd1d4cb3 237 #define __STREXH(value, ptr) __strex(value, ptr)
sam_grove 5:3f93dd1d4cb3 238
sam_grove 5:3f93dd1d4cb3 239
sam_grove 5:3f93dd1d4cb3 240 /** \brief STR Exclusive (32 bit)
sam_grove 5:3f93dd1d4cb3 241
sam_grove 5:3f93dd1d4cb3 242 This function performs an exclusive STR command for 32 bit values.
sam_grove 5:3f93dd1d4cb3 243
sam_grove 5:3f93dd1d4cb3 244 \param [in] value Value to store
sam_grove 5:3f93dd1d4cb3 245 \param [in] ptr Pointer to location
sam_grove 5:3f93dd1d4cb3 246 \return 0 Function succeeded
sam_grove 5:3f93dd1d4cb3 247 \return 1 Function failed
sam_grove 5:3f93dd1d4cb3 248 */
sam_grove 5:3f93dd1d4cb3 249 #define __STREXW(value, ptr) __strex(value, ptr)
sam_grove 5:3f93dd1d4cb3 250
sam_grove 5:3f93dd1d4cb3 251
sam_grove 5:3f93dd1d4cb3 252 /** \brief Remove the exclusive lock
sam_grove 5:3f93dd1d4cb3 253
sam_grove 5:3f93dd1d4cb3 254 This function removes the exclusive lock which is created by LDREX.
sam_grove 5:3f93dd1d4cb3 255
sam_grove 5:3f93dd1d4cb3 256 */
sam_grove 5:3f93dd1d4cb3 257 #define __CLREX __clrex
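To show how the exclusive-access macros are normally combined (an editorial sketch rather than anything from the original file): load-exclusive, modify, then retry the store-exclusive until it reports success. counter and atomic_increment() are hypothetical names.

/* Illustrative lock-free increment using the LDREX/STREX pair. */
static volatile uint32_t counter = 0;

static void atomic_increment(void)
{
    uint32_t val;
    do {
        val = __LDREXW(&counter) + 1u;    /* load with the exclusive monitor set      */
    } while (__STREXW(val, &counter));    /* 0 = stored, 1 = exclusivity lost, retry  */
}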
sam_grove 5:3f93dd1d4cb3 258
sam_grove 5:3f93dd1d4cb3 259
sam_grove 5:3f93dd1d4cb3 260 /** \brief Signed Saturate
sam_grove 5:3f93dd1d4cb3 261
sam_grove 5:3f93dd1d4cb3 262 This function saturates a signed value.
sam_grove 5:3f93dd1d4cb3 263
sam_grove 5:3f93dd1d4cb3 264 \param [in] value Value to be saturated
sam_grove 5:3f93dd1d4cb3 265 \param [in] sat Bit position to saturate to (1..32)
sam_grove 5:3f93dd1d4cb3 266 \return Saturated value
sam_grove 5:3f93dd1d4cb3 267 */
sam_grove 5:3f93dd1d4cb3 268 #define __SSAT __ssat
sam_grove 5:3f93dd1d4cb3 269
sam_grove 5:3f93dd1d4cb3 270
sam_grove 5:3f93dd1d4cb3 271 /** \brief Unsigned Saturate
sam_grove 5:3f93dd1d4cb3 272
sam_grove 5:3f93dd1d4cb3 273 This function saturates an unsigned value.
sam_grove 5:3f93dd1d4cb3 274
sam_grove 5:3f93dd1d4cb3 275 \param [in] value Value to be saturated
sam_grove 5:3f93dd1d4cb3 276 \param [in] sat Bit position to saturate to (0..31)
sam_grove 5:3f93dd1d4cb3 277 \return Saturated value
sam_grove 5:3f93dd1d4cb3 278 */
sam_grove 5:3f93dd1d4cb3 279 #define __USAT __usat
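A minimal sketch, assuming only this header, of the saturation macros clamping a wide accumulator into narrower ranges; the values and function name are made up, and the saturation position must be a compile-time constant for these intrinsics.

/* Illustrative clamping of a 32-bit accumulator. */
static void saturate_demo(int32_t acc)
{
    int16_t q15 = (int16_t)__SSAT(acc, 16);   /* clamp to [-32768, 32767] */
    uint8_t u8  = (uint8_t)__USAT(acc, 8);    /* clamp to [0, 255]        */
    (void)q15;
    (void)u8;
}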
sam_grove 5:3f93dd1d4cb3 280
sam_grove 5:3f93dd1d4cb3 281
sam_grove 5:3f93dd1d4cb3 282 /** \brief Count leading zeros
sam_grove 5:3f93dd1d4cb3 283
sam_grove 5:3f93dd1d4cb3 284 This function counts the number of leading zeros of a data value.
sam_grove 5:3f93dd1d4cb3 285
sam_grove 5:3f93dd1d4cb3 286 \param [in] value Value to count the leading zeros
sam_grove 5:3f93dd1d4cb3 287 \return number of leading zeros in value
sam_grove 5:3f93dd1d4cb3 288 */
sam_grove 5:3f93dd1d4cb3 289 #define __CLZ __clz
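As an example of a common __CLZ use (a hedged sketch, not part of the source): deriving the highest set bit to round a size up to the next power of two; next_pow2 is an illustrative helper name.

/* Illustrative helper: round x up to the next power of two (valid for x <= 2^31). */
static uint32_t next_pow2(uint32_t x)
{
    if (x <= 1u) {
        return 1u;
    }
    return 1u << (32u - __CLZ(x - 1u));   /* 31 - __CLZ(v) is the index of v's highest set bit */
}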
sam_grove 5:3f93dd1d4cb3 290
sam_grove 5:3f93dd1d4cb3 291 #endif /* (__CORTEX_M >= 0x03) */
sam_grove 5:3f93dd1d4cb3 292
sam_grove 5:3f93dd1d4cb3 293
sam_grove 5:3f93dd1d4cb3 294
sam_grove 5:3f93dd1d4cb3 295 #elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
sam_grove 5:3f93dd1d4cb3 296 /* IAR iccarm specific functions */
sam_grove 5:3f93dd1d4cb3 297
sam_grove 5:3f93dd1d4cb3 298 #include <cmsis_iar.h>
sam_grove 5:3f93dd1d4cb3 299
sam_grove 5:3f93dd1d4cb3 300
sam_grove 5:3f93dd1d4cb3 301 #elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
sam_grove 5:3f93dd1d4cb3 302 /* TI CCS specific functions */
sam_grove 5:3f93dd1d4cb3 303
sam_grove 5:3f93dd1d4cb3 304 #include <cmsis_ccs.h>
sam_grove 5:3f93dd1d4cb3 305
sam_grove 5:3f93dd1d4cb3 306
sam_grove 5:3f93dd1d4cb3 307 #elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
sam_grove 5:3f93dd1d4cb3 308 /* GNU gcc specific functions */
sam_grove 5:3f93dd1d4cb3 309
sam_grove 5:3f93dd1d4cb3 310 /* Define macros for porting to both thumb1 and thumb2.
sam_grove 5:3f93dd1d4cb3 311 * For thumb1, use low registers (r0-r7), specified by constraint "l".
sam_grove 5:3f93dd1d4cb3 312 * Otherwise, use general registers, specified by constraint "r". */
sam_grove 5:3f93dd1d4cb3 313 #if defined (__thumb__) && !defined (__thumb2__)
sam_grove 5:3f93dd1d4cb3 314 #define __CMSIS_GCC_OUT_REG(r) "=l" (r)
sam_grove 5:3f93dd1d4cb3 315 #define __CMSIS_GCC_USE_REG(r) "l" (r)
sam_grove 5:3f93dd1d4cb3 316 #else
sam_grove 5:3f93dd1d4cb3 317 #define __CMSIS_GCC_OUT_REG(r) "=r" (r)
sam_grove 5:3f93dd1d4cb3 318 #define __CMSIS_GCC_USE_REG(r) "r" (r)
sam_grove 5:3f93dd1d4cb3 319 #endif
sam_grove 5:3f93dd1d4cb3 320
sam_grove 5:3f93dd1d4cb3 321 /** \brief No Operation
sam_grove 5:3f93dd1d4cb3 322
sam_grove 5:3f93dd1d4cb3 323 No Operation does nothing. This instruction can be used for code alignment purposes.
sam_grove 5:3f93dd1d4cb3 324 */
sam_grove 5:3f93dd1d4cb3 325 __attribute__( ( always_inline ) ) __STATIC_INLINE void __NOP(void)
sam_grove 5:3f93dd1d4cb3 326 {
sam_grove 5:3f93dd1d4cb3 327 __ASM volatile ("nop");
sam_grove 5:3f93dd1d4cb3 328 }
sam_grove 5:3f93dd1d4cb3 329
sam_grove 5:3f93dd1d4cb3 330
sam_grove 5:3f93dd1d4cb3 331 /** \brief Wait For Interrupt
sam_grove 5:3f93dd1d4cb3 332
sam_grove 5:3f93dd1d4cb3 333 Wait For Interrupt is a hint instruction that suspends execution
sam_grove 5:3f93dd1d4cb3 334 until one of a number of events occurs.
sam_grove 5:3f93dd1d4cb3 335 */
sam_grove 5:3f93dd1d4cb3 336 __attribute__( ( always_inline ) ) __STATIC_INLINE void __WFI(void)
sam_grove 5:3f93dd1d4cb3 337 {
sam_grove 5:3f93dd1d4cb3 338 __ASM volatile ("wfi");
sam_grove 5:3f93dd1d4cb3 339 }
sam_grove 5:3f93dd1d4cb3 340
sam_grove 5:3f93dd1d4cb3 341
sam_grove 5:3f93dd1d4cb3 342 /** \brief Wait For Event
sam_grove 5:3f93dd1d4cb3 343
sam_grove 5:3f93dd1d4cb3 344 Wait For Event is a hint instruction that permits the processor to enter
sam_grove 5:3f93dd1d4cb3 345 a low-power state until one of a number of events occurs.
sam_grove 5:3f93dd1d4cb3 346 */
sam_grove 5:3f93dd1d4cb3 347 __attribute__( ( always_inline ) ) __STATIC_INLINE void __WFE(void)
sam_grove 5:3f93dd1d4cb3 348 {
sam_grove 5:3f93dd1d4cb3 349 __ASM volatile ("wfe");
sam_grove 5:3f93dd1d4cb3 350 }
sam_grove 5:3f93dd1d4cb3 351
sam_grove 5:3f93dd1d4cb3 352
sam_grove 5:3f93dd1d4cb3 353 /** \brief Send Event
sam_grove 5:3f93dd1d4cb3 354
sam_grove 5:3f93dd1d4cb3 355 Send Event is a hint instruction. It causes an event to be signaled to the CPU.
sam_grove 5:3f93dd1d4cb3 356 */
sam_grove 5:3f93dd1d4cb3 357 __attribute__( ( always_inline ) ) __STATIC_INLINE void __SEV(void)
sam_grove 5:3f93dd1d4cb3 358 {
sam_grove 5:3f93dd1d4cb3 359 __ASM volatile ("sev");
sam_grove 5:3f93dd1d4cb3 360 }
sam_grove 5:3f93dd1d4cb3 361
sam_grove 5:3f93dd1d4cb3 362
sam_grove 5:3f93dd1d4cb3 363 /** \brief Instruction Synchronization Barrier
sam_grove 5:3f93dd1d4cb3 364
sam_grove 5:3f93dd1d4cb3 365 Instruction Synchronization Barrier flushes the pipeline in the processor,
sam_grove 5:3f93dd1d4cb3 366 so that all instructions following the ISB are fetched from cache or
sam_grove 5:3f93dd1d4cb3 367 memory, after the instruction has been completed.
sam_grove 5:3f93dd1d4cb3 368 */
sam_grove 5:3f93dd1d4cb3 369 __attribute__( ( always_inline ) ) __STATIC_INLINE void __ISB(void)
sam_grove 5:3f93dd1d4cb3 370 {
sam_grove 5:3f93dd1d4cb3 371 __ASM volatile ("isb");
sam_grove 5:3f93dd1d4cb3 372 }
sam_grove 5:3f93dd1d4cb3 373
sam_grove 5:3f93dd1d4cb3 374
sam_grove 5:3f93dd1d4cb3 375 /** \brief Data Synchronization Barrier
sam_grove 5:3f93dd1d4cb3 376
sam_grove 5:3f93dd1d4cb3 377 This function acts as a special kind of Data Memory Barrier.
sam_grove 5:3f93dd1d4cb3 378 It completes when all explicit memory accesses before this instruction complete.
sam_grove 5:3f93dd1d4cb3 379 */
sam_grove 5:3f93dd1d4cb3 380 __attribute__( ( always_inline ) ) __STATIC_INLINE void __DSB(void)
sam_grove 5:3f93dd1d4cb3 381 {
sam_grove 5:3f93dd1d4cb3 382 __ASM volatile ("dsb");
sam_grove 5:3f93dd1d4cb3 383 }
sam_grove 5:3f93dd1d4cb3 384
sam_grove 5:3f93dd1d4cb3 385
sam_grove 5:3f93dd1d4cb3 386 /** \brief Data Memory Barrier
sam_grove 5:3f93dd1d4cb3 387
sam_grove 5:3f93dd1d4cb3 388 This function ensures the apparent order of the explicit memory operations before
sam_grove 5:3f93dd1d4cb3 389 and after the instruction, without ensuring their completion.
sam_grove 5:3f93dd1d4cb3 390 */
sam_grove 5:3f93dd1d4cb3 391 __attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
sam_grove 5:3f93dd1d4cb3 392 {
sam_grove 5:3f93dd1d4cb3 393 __ASM volatile ("dmb");
sam_grove 5:3f93dd1d4cb3 394 }
sam_grove 5:3f93dd1d4cb3 395
sam_grove 5:3f93dd1d4cb3 396
sam_grove 5:3f93dd1d4cb3 397 /** \brief Reverse byte order (32 bit)
sam_grove 5:3f93dd1d4cb3 398
sam_grove 5:3f93dd1d4cb3 399 This function reverses the byte order in an integer value.
sam_grove 5:3f93dd1d4cb3 400
sam_grove 5:3f93dd1d4cb3 401 \param [in] value Value to reverse
sam_grove 5:3f93dd1d4cb3 402 \return Reversed value
sam_grove 5:3f93dd1d4cb3 403 */
sam_grove 5:3f93dd1d4cb3 404 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
sam_grove 5:3f93dd1d4cb3 405 {
sam_grove 5:3f93dd1d4cb3 406 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
sam_grove 5:3f93dd1d4cb3 407 return __builtin_bswap32(value);
sam_grove 5:3f93dd1d4cb3 408 #else
sam_grove 5:3f93dd1d4cb3 409 uint32_t result;
sam_grove 5:3f93dd1d4cb3 410
sam_grove 5:3f93dd1d4cb3 411 __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
sam_grove 5:3f93dd1d4cb3 412 return(result);
sam_grove 5:3f93dd1d4cb3 413 #endif
sam_grove 5:3f93dd1d4cb3 414 }
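A short, assumed usage note: on little-endian Cortex-M cores, __REV is the usual way to convert a 32-bit word to or from big-endian (network) byte order; the helper name below is illustrative.

/* Illustrative byte-order conversion on a little-endian core. */
static inline uint32_t host_to_be32(uint32_t value)
{
    return __REV(value);   /* 0x12345678 -> 0x78563412 */
}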
sam_grove 5:3f93dd1d4cb3 415
sam_grove 5:3f93dd1d4cb3 416
sam_grove 5:3f93dd1d4cb3 417 /** \brief Reverse byte order (16 bit)
sam_grove 5:3f93dd1d4cb3 418
sam_grove 5:3f93dd1d4cb3 419 This function reverses the byte order in two unsigned short values.
sam_grove 5:3f93dd1d4cb3 420
sam_grove 5:3f93dd1d4cb3 421 \param [in] value Value to reverse
sam_grove 5:3f93dd1d4cb3 422 \return Reversed value
sam_grove 5:3f93dd1d4cb3 423 */
sam_grove 5:3f93dd1d4cb3 424 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t value)
sam_grove 5:3f93dd1d4cb3 425 {
sam_grove 5:3f93dd1d4cb3 426 uint32_t result;
sam_grove 5:3f93dd1d4cb3 427
sam_grove 5:3f93dd1d4cb3 428 __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
sam_grove 5:3f93dd1d4cb3 429 return(result);
sam_grove 5:3f93dd1d4cb3 430 }
sam_grove 5:3f93dd1d4cb3 431
sam_grove 5:3f93dd1d4cb3 432
sam_grove 5:3f93dd1d4cb3 433 /** \brief Reverse byte order in signed short value
sam_grove 5:3f93dd1d4cb3 434
sam_grove 5:3f93dd1d4cb3 435 This function reverses the byte order in a signed short value with sign extension to integer.
sam_grove 5:3f93dd1d4cb3 436
sam_grove 5:3f93dd1d4cb3 437 \param [in] value Value to reverse
sam_grove 5:3f93dd1d4cb3 438 \return Reversed value
sam_grove 5:3f93dd1d4cb3 439 */
sam_grove 5:3f93dd1d4cb3 440 __attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
sam_grove 5:3f93dd1d4cb3 441 {
sam_grove 5:3f93dd1d4cb3 442 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
sam_grove 5:3f93dd1d4cb3 443 return (short)__builtin_bswap16(value);
sam_grove 5:3f93dd1d4cb3 444 #else
sam_grove 5:3f93dd1d4cb3 445 uint32_t result;
sam_grove 5:3f93dd1d4cb3 446
sam_grove 5:3f93dd1d4cb3 447 __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
sam_grove 5:3f93dd1d4cb3 448 return(result);
sam_grove 5:3f93dd1d4cb3 449 #endif
sam_grove 5:3f93dd1d4cb3 450 }
sam_grove 5:3f93dd1d4cb3 451
sam_grove 5:3f93dd1d4cb3 452
sam_grove 5:3f93dd1d4cb3 453 /** \brief Rotate Right in unsigned value (32 bit)
sam_grove 5:3f93dd1d4cb3 454
sam_grove 5:3f93dd1d4cb3 455 This function rotates a 32-bit value right by a specified number of bits.
sam_grove 5:3f93dd1d4cb3 456
sam_grove 5:3f93dd1d4cb3 457 \param [in] op1 Value to rotate
sam_grove 5:3f93dd1d4cb3 458 \param [in] op2 Number of bits to rotate
sam_grove 5:3f93dd1d4cb3 459 \return Rotated value
sam_grove 5:3f93dd1d4cb3 460 */
sam_grove 5:3f93dd1d4cb3 461 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
sam_grove 5:3f93dd1d4cb3 462 {
sam_grove 5:3f93dd1d4cb3 463 return (op1 >> op2) | (op1 << (32 - op2));
sam_grove 5:3f93dd1d4cb3 464 }
sam_grove 5:3f93dd1d4cb3 465
sam_grove 5:3f93dd1d4cb3 466
sam_grove 5:3f93dd1d4cb3 467 /** \brief Breakpoint
sam_grove 5:3f93dd1d4cb3 468
sam_grove 5:3f93dd1d4cb3 469 This function causes the processor to enter Debug state.
sam_grove 5:3f93dd1d4cb3 470 Debug tools can use this to investigate system state when the instruction at a particular address is reached.
sam_grove 5:3f93dd1d4cb3 471
sam_grove 5:3f93dd1d4cb3 472 \param [in] value is ignored by the processor.
sam_grove 5:3f93dd1d4cb3 473 If required, a debugger can use it to store additional information about the breakpoint.
sam_grove 5:3f93dd1d4cb3 474 */
sam_grove 5:3f93dd1d4cb3 475 #define __BKPT(value) __ASM volatile ("bkpt "#value)
sam_grove 5:3f93dd1d4cb3 476
sam_grove 5:3f93dd1d4cb3 477
sam_grove 5:3f93dd1d4cb3 478 #if (__CORTEX_M >= 0x03)
sam_grove 5:3f93dd1d4cb3 479
sam_grove 5:3f93dd1d4cb3 480 /** \brief Reverse bit order of value
sam_grove 5:3f93dd1d4cb3 481
sam_grove 5:3f93dd1d4cb3 482 This function reverses the bit order of the given value.
sam_grove 5:3f93dd1d4cb3 483
sam_grove 5:3f93dd1d4cb3 484 \param [in] value Value to reverse
sam_grove 5:3f93dd1d4cb3 485 \return Reversed value
sam_grove 5:3f93dd1d4cb3 486 */
sam_grove 5:3f93dd1d4cb3 487 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
sam_grove 5:3f93dd1d4cb3 488 {
sam_grove 5:3f93dd1d4cb3 489 uint32_t result;
sam_grove 5:3f93dd1d4cb3 490
sam_grove 5:3f93dd1d4cb3 491 __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
sam_grove 5:3f93dd1d4cb3 492 return(result);
sam_grove 5:3f93dd1d4cb3 493 }
sam_grove 5:3f93dd1d4cb3 494
sam_grove 5:3f93dd1d4cb3 495
sam_grove 5:3f93dd1d4cb3 496 /** \brief LDR Exclusive (8 bit)
sam_grove 5:3f93dd1d4cb3 497
sam_grove 5:3f93dd1d4cb3 498 This function performs an exclusive LDR command for an 8 bit value.
sam_grove 5:3f93dd1d4cb3 499
sam_grove 5:3f93dd1d4cb3 500 \param [in] ptr Pointer to data
sam_grove 5:3f93dd1d4cb3 501 \return value of type uint8_t at (*ptr)
sam_grove 5:3f93dd1d4cb3 502 */
sam_grove 5:3f93dd1d4cb3 503 __attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
sam_grove 5:3f93dd1d4cb3 504 {
sam_grove 5:3f93dd1d4cb3 505 uint32_t result;
sam_grove 5:3f93dd1d4cb3 506
sam_grove 5:3f93dd1d4cb3 507 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
sam_grove 5:3f93dd1d4cb3 508 __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
sam_grove 5:3f93dd1d4cb3 509 #else
sam_grove 5:3f93dd1d4cb3 510 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
sam_grove 5:3f93dd1d4cb3 511 accepted by the assembler, so the following less efficient pattern has to be used.
sam_grove 5:3f93dd1d4cb3 512 */
sam_grove 5:3f93dd1d4cb3 513 __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
sam_grove 5:3f93dd1d4cb3 514 #endif
sam_grove 5:3f93dd1d4cb3 515 return(result);
sam_grove 5:3f93dd1d4cb3 516 }
sam_grove 5:3f93dd1d4cb3 517
sam_grove 5:3f93dd1d4cb3 518
sam_grove 5:3f93dd1d4cb3 519 /** \brief LDR Exclusive (16 bit)
sam_grove 5:3f93dd1d4cb3 520
sam_grove 5:3f93dd1d4cb3 521 This function performs an exclusive LDR command for 16 bit values.
sam_grove 5:3f93dd1d4cb3 522
sam_grove 5:3f93dd1d4cb3 523 \param [in] ptr Pointer to data
sam_grove 5:3f93dd1d4cb3 524 \return value of type uint16_t at (*ptr)
sam_grove 5:3f93dd1d4cb3 525 */
sam_grove 5:3f93dd1d4cb3 526 __attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
sam_grove 5:3f93dd1d4cb3 527 {
sam_grove 5:3f93dd1d4cb3 528 uint32_t result;
sam_grove 5:3f93dd1d4cb3 529
sam_grove 5:3f93dd1d4cb3 530 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
sam_grove 5:3f93dd1d4cb3 531 __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
sam_grove 5:3f93dd1d4cb3 532 #else
sam_grove 5:3f93dd1d4cb3 533 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
sam_grove 5:3f93dd1d4cb3 534 accepted by the assembler, so the following less efficient pattern has to be used.
sam_grove 5:3f93dd1d4cb3 535 */
sam_grove 5:3f93dd1d4cb3 536 __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
sam_grove 5:3f93dd1d4cb3 537 #endif
sam_grove 5:3f93dd1d4cb3 538 return(result);
sam_grove 5:3f93dd1d4cb3 539 }
sam_grove 5:3f93dd1d4cb3 540
sam_grove 5:3f93dd1d4cb3 541
sam_grove 5:3f93dd1d4cb3 542 /** \brief LDR Exclusive (32 bit)
sam_grove 5:3f93dd1d4cb3 543
sam_grove 5:3f93dd1d4cb3 544 This function performs an exclusive LDR command for 32 bit values.
sam_grove 5:3f93dd1d4cb3 545
sam_grove 5:3f93dd1d4cb3 546 \param [in] ptr Pointer to data
sam_grove 5:3f93dd1d4cb3 547 \return value of type uint32_t at (*ptr)
sam_grove 5:3f93dd1d4cb3 548 */
sam_grove 5:3f93dd1d4cb3 549 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
sam_grove 5:3f93dd1d4cb3 550 {
sam_grove 5:3f93dd1d4cb3 551 uint32_t result;
sam_grove 5:3f93dd1d4cb3 552
sam_grove 5:3f93dd1d4cb3 553 __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
sam_grove 5:3f93dd1d4cb3 554 return(result);
sam_grove 5:3f93dd1d4cb3 555 }
sam_grove 5:3f93dd1d4cb3 556
sam_grove 5:3f93dd1d4cb3 557
sam_grove 5:3f93dd1d4cb3 558 /** \brief STR Exclusive (8 bit)
sam_grove 5:3f93dd1d4cb3 559
sam_grove 5:3f93dd1d4cb3 560 This function performs an exclusive STR command for 8 bit values.
sam_grove 5:3f93dd1d4cb3 561
sam_grove 5:3f93dd1d4cb3 562 \param [in] value Value to store
sam_grove 5:3f93dd1d4cb3 563 \param [in] ptr Pointer to location
sam_grove 5:3f93dd1d4cb3 564 \return 0 Function succeeded
sam_grove 5:3f93dd1d4cb3 565 \return 1 Function failed
sam_grove 5:3f93dd1d4cb3 566 */
sam_grove 5:3f93dd1d4cb3 567 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
sam_grove 5:3f93dd1d4cb3 568 {
sam_grove 5:3f93dd1d4cb3 569 uint32_t result;
sam_grove 5:3f93dd1d4cb3 570
sam_grove 5:3f93dd1d4cb3 571 __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
sam_grove 5:3f93dd1d4cb3 572 return(result);
sam_grove 5:3f93dd1d4cb3 573 }
sam_grove 5:3f93dd1d4cb3 574
sam_grove 5:3f93dd1d4cb3 575
sam_grove 5:3f93dd1d4cb3 576 /** \brief STR Exclusive (16 bit)
sam_grove 5:3f93dd1d4cb3 577
sam_grove 5:3f93dd1d4cb3 578 This function performs an exclusive STR command for 16 bit values.
sam_grove 5:3f93dd1d4cb3 579
sam_grove 5:3f93dd1d4cb3 580 \param [in] value Value to store
sam_grove 5:3f93dd1d4cb3 581 \param [in] ptr Pointer to location
sam_grove 5:3f93dd1d4cb3 582 \return 0 Function succeeded
sam_grove 5:3f93dd1d4cb3 583 \return 1 Function failed
sam_grove 5:3f93dd1d4cb3 584 */
sam_grove 5:3f93dd1d4cb3 585 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
sam_grove 5:3f93dd1d4cb3 586 {
sam_grove 5:3f93dd1d4cb3 587 uint32_t result;
sam_grove 5:3f93dd1d4cb3 588
sam_grove 5:3f93dd1d4cb3 589 __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
sam_grove 5:3f93dd1d4cb3 590 return(result);
sam_grove 5:3f93dd1d4cb3 591 }
sam_grove 5:3f93dd1d4cb3 592
sam_grove 5:3f93dd1d4cb3 593
sam_grove 5:3f93dd1d4cb3 594 /** \brief STR Exclusive (32 bit)
sam_grove 5:3f93dd1d4cb3 595
sam_grove 5:3f93dd1d4cb3 596 This function performs an exclusive STR command for 32 bit values.
sam_grove 5:3f93dd1d4cb3 597
sam_grove 5:3f93dd1d4cb3 598 \param [in] value Value to store
sam_grove 5:3f93dd1d4cb3 599 \param [in] ptr Pointer to location
sam_grove 5:3f93dd1d4cb3 600 \return 0 Function succeeded
sam_grove 5:3f93dd1d4cb3 601 \return 1 Function failed
sam_grove 5:3f93dd1d4cb3 602 */
sam_grove 5:3f93dd1d4cb3 603 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
sam_grove 5:3f93dd1d4cb3 604 {
sam_grove 5:3f93dd1d4cb3 605 uint32_t result;
sam_grove 5:3f93dd1d4cb3 606
sam_grove 5:3f93dd1d4cb3 607 __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
sam_grove 5:3f93dd1d4cb3 608 return(result);
sam_grove 5:3f93dd1d4cb3 609 }
sam_grove 5:3f93dd1d4cb3 610
sam_grove 5:3f93dd1d4cb3 611
sam_grove 5:3f93dd1d4cb3 612 /** \brief Remove the exclusive lock
sam_grove 5:3f93dd1d4cb3 613
sam_grove 5:3f93dd1d4cb3 614 This function removes the exclusive lock which is created by LDREX.
sam_grove 5:3f93dd1d4cb3 615
sam_grove 5:3f93dd1d4cb3 616 */
sam_grove 5:3f93dd1d4cb3 617 __attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
sam_grove 5:3f93dd1d4cb3 618 {
sam_grove 5:3f93dd1d4cb3 619 __ASM volatile ("clrex" ::: "memory");
sam_grove 5:3f93dd1d4cb3 620 }
sam_grove 5:3f93dd1d4cb3 621
sam_grove 5:3f93dd1d4cb3 622
sam_grove 5:3f93dd1d4cb3 623 /** \brief Signed Saturate
sam_grove 5:3f93dd1d4cb3 624
sam_grove 5:3f93dd1d4cb3 625 This function saturates a signed value.
sam_grove 5:3f93dd1d4cb3 626
sam_grove 5:3f93dd1d4cb3 627 \param [in] value Value to be saturated
sam_grove 5:3f93dd1d4cb3 628 \param [in] sat Bit position to saturate to (1..32)
sam_grove 5:3f93dd1d4cb3 629 \return Saturated value
sam_grove 5:3f93dd1d4cb3 630 */
sam_grove 5:3f93dd1d4cb3 631 #define __SSAT(ARG1,ARG2) \
sam_grove 5:3f93dd1d4cb3 632 ({ \
sam_grove 5:3f93dd1d4cb3 633 uint32_t __RES, __ARG1 = (ARG1); \
sam_grove 5:3f93dd1d4cb3 634 __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
sam_grove 5:3f93dd1d4cb3 635 __RES; \
sam_grove 5:3f93dd1d4cb3 636 })
sam_grove 5:3f93dd1d4cb3 637
sam_grove 5:3f93dd1d4cb3 638
sam_grove 5:3f93dd1d4cb3 639 /** \brief Unsigned Saturate
sam_grove 5:3f93dd1d4cb3 640
sam_grove 5:3f93dd1d4cb3 641 This function saturates an unsigned value.
sam_grove 5:3f93dd1d4cb3 642
sam_grove 5:3f93dd1d4cb3 643 \param [in] value Value to be saturated
sam_grove 5:3f93dd1d4cb3 644 \param [in] sat Bit position to saturate to (0..31)
sam_grove 5:3f93dd1d4cb3 645 \return Saturated value
sam_grove 5:3f93dd1d4cb3 646 */
sam_grove 5:3f93dd1d4cb3 647 #define __USAT(ARG1,ARG2) \
sam_grove 5:3f93dd1d4cb3 648 ({ \
sam_grove 5:3f93dd1d4cb3 649 uint32_t __RES, __ARG1 = (ARG1); \
sam_grove 5:3f93dd1d4cb3 650 __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
sam_grove 5:3f93dd1d4cb3 651 __RES; \
sam_grove 5:3f93dd1d4cb3 652 })
sam_grove 5:3f93dd1d4cb3 653
sam_grove 5:3f93dd1d4cb3 654
sam_grove 5:3f93dd1d4cb3 655 /** \brief Count leading zeros
sam_grove 5:3f93dd1d4cb3 656
sam_grove 5:3f93dd1d4cb3 657 This function counts the number of leading zeros of a data value.
sam_grove 5:3f93dd1d4cb3 658
sam_grove 5:3f93dd1d4cb3 659 \param [in] value Value to count the leading zeros
sam_grove 5:3f93dd1d4cb3 660 \return number of leading zeros in value
sam_grove 5:3f93dd1d4cb3 661 */
sam_grove 5:3f93dd1d4cb3 662 __attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
sam_grove 5:3f93dd1d4cb3 663 {
sam_grove 5:3f93dd1d4cb3 664 uint32_t result;
sam_grove 5:3f93dd1d4cb3 665
sam_grove 5:3f93dd1d4cb3 666 __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
sam_grove 5:3f93dd1d4cb3 667 return(result);
sam_grove 5:3f93dd1d4cb3 668 }
sam_grove 5:3f93dd1d4cb3 669
sam_grove 5:3f93dd1d4cb3 670 #endif /* (__CORTEX_M >= 0x03) */
sam_grove 5:3f93dd1d4cb3 671
sam_grove 5:3f93dd1d4cb3 672
sam_grove 5:3f93dd1d4cb3 673
sam_grove 5:3f93dd1d4cb3 674
sam_grove 5:3f93dd1d4cb3 675 #elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
sam_grove 5:3f93dd1d4cb3 676 /* TASKING carm specific functions */
sam_grove 5:3f93dd1d4cb3 677
sam_grove 5:3f93dd1d4cb3 678 /*
sam_grove 5:3f93dd1d4cb3 679 * The CMSIS functions have been implemented as intrinsics in the compiler.
sam_grove 5:3f93dd1d4cb3 680 * Please use "carm -?i" to get an up-to-date list of all intrinsics,
sam_grove 5:3f93dd1d4cb3 681 * including the CMSIS ones.
sam_grove 5:3f93dd1d4cb3 682 */
sam_grove 5:3f93dd1d4cb3 683
sam_grove 5:3f93dd1d4cb3 684 #endif
sam_grove 5:3f93dd1d4cb3 685
sam_grove 5:3f93dd1d4cb3 686 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
sam_grove 5:3f93dd1d4cb3 687
sam_grove 5:3f93dd1d4cb3 688 #endif /* __CORE_CMINSTR_H */