RTC set to true

Committer:
kevman
Date:
Wed Mar 13 11:03:24 2019 +0000
Revision:
2:7aab896b1a3b
Parent:
0:38ceb79fef03
2019-03-13

Who changed what in which revision?

User | Revision | Line number | New contents of line
kevman 0:38ceb79fef03 1 /**************************************************************************//**
kevman 0:38ceb79fef03 2 * @file cmsis_gcc.h
kevman 0:38ceb79fef03 3 * @brief CMSIS compiler GCC header file
kevman 0:38ceb79fef03 4 * @version V5.0.4
kevman 0:38ceb79fef03 5 * @date 09. April 2018
kevman 0:38ceb79fef03 6 ******************************************************************************/
kevman 0:38ceb79fef03 7 /*
kevman 0:38ceb79fef03 8 * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
kevman 0:38ceb79fef03 9 *
kevman 0:38ceb79fef03 10 * SPDX-License-Identifier: Apache-2.0
kevman 0:38ceb79fef03 11 *
kevman 0:38ceb79fef03 12 * Licensed under the Apache License, Version 2.0 (the License); you may
kevman 0:38ceb79fef03 13 * not use this file except in compliance with the License.
kevman 0:38ceb79fef03 14 * You may obtain a copy of the License at
kevman 0:38ceb79fef03 15 *
kevman 0:38ceb79fef03 16 * www.apache.org/licenses/LICENSE-2.0
kevman 0:38ceb79fef03 17 *
kevman 0:38ceb79fef03 18 * Unless required by applicable law or agreed to in writing, software
kevman 0:38ceb79fef03 19 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
kevman 0:38ceb79fef03 20 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
kevman 0:38ceb79fef03 21 * See the License for the specific language governing permissions and
kevman 0:38ceb79fef03 22 * limitations under the License.
kevman 0:38ceb79fef03 23 */
kevman 0:38ceb79fef03 24
#ifndef __CMSIS_GCC_H
#define __CMSIS_GCC_H

/* ignore some GCC warnings */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"

/* Fallback for __has_builtin */
#ifndef __has_builtin
#define __has_builtin(x) (0)
#endif

/* CMSIS compiler specific defines */
/* Map the CMSIS portability macros onto their GCC equivalents; each is
   guarded so a toolchain/port may pre-define its own variant. */
#ifndef __ASM
#define __ASM __asm
#endif
#ifndef __INLINE
#define __INLINE inline
#endif
#ifndef __STATIC_INLINE
#define __STATIC_INLINE static inline
#endif
#ifndef __STATIC_FORCEINLINE
#define __STATIC_FORCEINLINE __attribute__((always_inline)) static inline
#endif
#ifndef __NO_RETURN
#define __NO_RETURN __attribute__((__noreturn__))
#endif
#ifndef __USED
#define __USED __attribute__((used))
#endif
#ifndef __WEAK
#define __WEAK __attribute__((weak))
#endif
#ifndef __PACKED
#define __PACKED __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_STRUCT
#define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_UNION
#define __PACKED_UNION union __attribute__((packed, aligned(1)))
#endif
/* The __UNALIGNED_* helpers below wrap the access in a packed one-member
   struct so GCC emits code that is safe for unaligned addresses instead of
   a plain (possibly trapping) word access. */
#ifndef __UNALIGNED_UINT32 /* deprecated */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpacked"
#pragma GCC diagnostic ignored "-Wattributes"
struct __attribute__((packed)) T_UINT32 { uint32_t v; };
#pragma GCC diagnostic pop
#define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
#endif
#ifndef __UNALIGNED_UINT16_WRITE
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpacked"
#pragma GCC diagnostic ignored "-Wattributes"
__PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
#pragma GCC diagnostic pop
#define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT16_READ
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpacked"
#pragma GCC diagnostic ignored "-Wattributes"
__PACKED_STRUCT T_UINT16_READ { uint16_t v; };
#pragma GCC diagnostic pop
#define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef __UNALIGNED_UINT32_WRITE
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpacked"
#pragma GCC diagnostic ignored "-Wattributes"
__PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
#pragma GCC diagnostic pop
#define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT32_READ
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpacked"
#pragma GCC diagnostic ignored "-Wattributes"
__PACKED_STRUCT T_UINT32_READ { uint32_t v; };
#pragma GCC diagnostic pop
#define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef __ALIGNED
#define __ALIGNED(x) __attribute__((aligned(x)))
#endif
#ifndef __RESTRICT
#define __RESTRICT __restrict
#endif
kevman 0:38ceb79fef03 116
kevman 0:38ceb79fef03 117
kevman 0:38ceb79fef03 118 /* ########################### Core Function Access ########################### */
kevman 0:38ceb79fef03 119 /** \ingroup CMSIS_Core_FunctionInterface
kevman 0:38ceb79fef03 120 \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
kevman 0:38ceb79fef03 121 @{
kevman 0:38ceb79fef03 122 */
kevman 0:38ceb79fef03 123
kevman 0:38ceb79fef03 124 /**
kevman 0:38ceb79fef03 125 \brief Enable IRQ Interrupts
kevman 0:38ceb79fef03 126 \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
kevman 0:38ceb79fef03 127 Can only be executed in Privileged modes.
kevman 0:38ceb79fef03 128 */
kevman 0:38ceb79fef03 129 __STATIC_FORCEINLINE void __enable_irq(void)
kevman 0:38ceb79fef03 130 {
kevman 0:38ceb79fef03 131 __ASM volatile ("cpsie i" : : : "memory");
kevman 0:38ceb79fef03 132 }
kevman 0:38ceb79fef03 133
kevman 0:38ceb79fef03 134
kevman 0:38ceb79fef03 135 /**
kevman 0:38ceb79fef03 136 \brief Disable IRQ Interrupts
kevman 0:38ceb79fef03 137 \details Disables IRQ interrupts by setting the I-bit in the CPSR.
kevman 0:38ceb79fef03 138 Can only be executed in Privileged modes.
kevman 0:38ceb79fef03 139 */
kevman 0:38ceb79fef03 140 __STATIC_FORCEINLINE void __disable_irq(void)
kevman 0:38ceb79fef03 141 {
kevman 0:38ceb79fef03 142 __ASM volatile ("cpsid i" : : : "memory");
kevman 0:38ceb79fef03 143 }
kevman 0:38ceb79fef03 144
kevman 0:38ceb79fef03 145
kevman 0:38ceb79fef03 146 /**
kevman 0:38ceb79fef03 147 \brief Get Control Register
kevman 0:38ceb79fef03 148 \details Returns the content of the Control Register.
kevman 0:38ceb79fef03 149 \return Control Register value
kevman 0:38ceb79fef03 150 */
kevman 0:38ceb79fef03 151 __STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
kevman 0:38ceb79fef03 152 {
kevman 0:38ceb79fef03 153 uint32_t result;
kevman 0:38ceb79fef03 154
kevman 0:38ceb79fef03 155 __ASM volatile ("MRS %0, control" : "=r" (result) );
kevman 0:38ceb79fef03 156 return(result);
kevman 0:38ceb79fef03 157 }
kevman 0:38ceb79fef03 158
kevman 0:38ceb79fef03 159
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Control Register (non-secure)
  \details Reads the non-secure CONTROL register while executing in secure state.
  \return  non-secure Control Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
    uint32_t ctrl;

    __ASM volatile ("MRS %0, control_ns" : "=r" (ctrl) );
    return ctrl;
}
#endif
kevman 0:38ceb79fef03 174
kevman 0:38ceb79fef03 175
kevman 0:38ceb79fef03 176 /**
kevman 0:38ceb79fef03 177 \brief Set Control Register
kevman 0:38ceb79fef03 178 \details Writes the given value to the Control Register.
kevman 0:38ceb79fef03 179 \param [in] control Control Register value to set
kevman 0:38ceb79fef03 180 */
kevman 0:38ceb79fef03 181 __STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
kevman 0:38ceb79fef03 182 {
kevman 0:38ceb79fef03 183 __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
kevman 0:38ceb79fef03 184 }
kevman 0:38ceb79fef03 185
kevman 0:38ceb79fef03 186
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Control Register (non-secure)
  \details Writes the given value to the non-secure CONTROL register while in secure state.
  \param [in] control  Control Register value to set
 */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
    __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif
kevman 0:38ceb79fef03 198
kevman 0:38ceb79fef03 199
kevman 0:38ceb79fef03 200 /**
kevman 0:38ceb79fef03 201 \brief Get IPSR Register
kevman 0:38ceb79fef03 202 \details Returns the content of the IPSR Register.
kevman 0:38ceb79fef03 203 \return IPSR Register value
kevman 0:38ceb79fef03 204 */
kevman 0:38ceb79fef03 205 __STATIC_FORCEINLINE uint32_t __get_IPSR(void)
kevman 0:38ceb79fef03 206 {
kevman 0:38ceb79fef03 207 uint32_t result;
kevman 0:38ceb79fef03 208
kevman 0:38ceb79fef03 209 __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
kevman 0:38ceb79fef03 210 return(result);
kevman 0:38ceb79fef03 211 }
kevman 0:38ceb79fef03 212
kevman 0:38ceb79fef03 213
kevman 0:38ceb79fef03 214 /**
kevman 0:38ceb79fef03 215 \brief Get APSR Register
kevman 0:38ceb79fef03 216 \details Returns the content of the APSR Register.
kevman 0:38ceb79fef03 217 \return APSR Register value
kevman 0:38ceb79fef03 218 */
kevman 0:38ceb79fef03 219 __STATIC_FORCEINLINE uint32_t __get_APSR(void)
kevman 0:38ceb79fef03 220 {
kevman 0:38ceb79fef03 221 uint32_t result;
kevman 0:38ceb79fef03 222
kevman 0:38ceb79fef03 223 __ASM volatile ("MRS %0, apsr" : "=r" (result) );
kevman 0:38ceb79fef03 224 return(result);
kevman 0:38ceb79fef03 225 }
kevman 0:38ceb79fef03 226
kevman 0:38ceb79fef03 227
kevman 0:38ceb79fef03 228 /**
kevman 0:38ceb79fef03 229 \brief Get xPSR Register
kevman 0:38ceb79fef03 230 \details Returns the content of the xPSR Register.
kevman 0:38ceb79fef03 231 \return xPSR Register value
kevman 0:38ceb79fef03 232 */
kevman 0:38ceb79fef03 233 __STATIC_FORCEINLINE uint32_t __get_xPSR(void)
kevman 0:38ceb79fef03 234 {
kevman 0:38ceb79fef03 235 uint32_t result;
kevman 0:38ceb79fef03 236
kevman 0:38ceb79fef03 237 __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
kevman 0:38ceb79fef03 238 return(result);
kevman 0:38ceb79fef03 239 }
kevman 0:38ceb79fef03 240
kevman 0:38ceb79fef03 241
kevman 0:38ceb79fef03 242 /**
kevman 0:38ceb79fef03 243 \brief Get Process Stack Pointer
kevman 0:38ceb79fef03 244 \details Returns the current value of the Process Stack Pointer (PSP).
kevman 0:38ceb79fef03 245 \return PSP Register value
kevman 0:38ceb79fef03 246 */
kevman 0:38ceb79fef03 247 __STATIC_FORCEINLINE uint32_t __get_PSP(void)
kevman 0:38ceb79fef03 248 {
kevman 0:38ceb79fef03 249 uint32_t result;
kevman 0:38ceb79fef03 250
kevman 0:38ceb79fef03 251 __ASM volatile ("MRS %0, psp" : "=r" (result) );
kevman 0:38ceb79fef03 252 return(result);
kevman 0:38ceb79fef03 253 }
kevman 0:38ceb79fef03 254
kevman 0:38ceb79fef03 255
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer (non-secure)
  \details Reads the non-secure Process Stack Pointer (PSP) while in secure state.
  \return  PSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
    uint32_t sp;

    __ASM volatile ("MRS %0, psp_ns" : "=r" (sp) );
    return sp;
}
#endif
kevman 0:38ceb79fef03 270
kevman 0:38ceb79fef03 271
kevman 0:38ceb79fef03 272 /**
kevman 0:38ceb79fef03 273 \brief Set Process Stack Pointer
kevman 0:38ceb79fef03 274 \details Assigns the given value to the Process Stack Pointer (PSP).
kevman 0:38ceb79fef03 275 \param [in] topOfProcStack Process Stack Pointer value to set
kevman 0:38ceb79fef03 276 */
kevman 0:38ceb79fef03 277 __STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
kevman 0:38ceb79fef03 278 {
kevman 0:38ceb79fef03 279 __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
kevman 0:38ceb79fef03 280 }
kevman 0:38ceb79fef03 281
kevman 0:38ceb79fef03 282
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Process Stack Pointer (non-secure)
  \details Writes the given value to the non-secure Process Stack Pointer (PSP) while in secure state.
  \param [in] topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
    __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif
kevman 0:38ceb79fef03 294
kevman 0:38ceb79fef03 295
kevman 0:38ceb79fef03 296 /**
kevman 0:38ceb79fef03 297 \brief Get Main Stack Pointer
kevman 0:38ceb79fef03 298 \details Returns the current value of the Main Stack Pointer (MSP).
kevman 0:38ceb79fef03 299 \return MSP Register value
kevman 0:38ceb79fef03 300 */
kevman 0:38ceb79fef03 301 __STATIC_FORCEINLINE uint32_t __get_MSP(void)
kevman 0:38ceb79fef03 302 {
kevman 0:38ceb79fef03 303 uint32_t result;
kevman 0:38ceb79fef03 304
kevman 0:38ceb79fef03 305 __ASM volatile ("MRS %0, msp" : "=r" (result) );
kevman 0:38ceb79fef03 306 return(result);
kevman 0:38ceb79fef03 307 }
kevman 0:38ceb79fef03 308
kevman 0:38ceb79fef03 309
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Main Stack Pointer (non-secure)
  \details Reads the non-secure Main Stack Pointer (MSP) while in secure state.
  \return  MSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
    uint32_t sp;

    __ASM volatile ("MRS %0, msp_ns" : "=r" (sp) );
    return sp;
}
#endif
kevman 0:38ceb79fef03 324
kevman 0:38ceb79fef03 325
kevman 0:38ceb79fef03 326 /**
kevman 0:38ceb79fef03 327 \brief Set Main Stack Pointer
kevman 0:38ceb79fef03 328 \details Assigns the given value to the Main Stack Pointer (MSP).
kevman 0:38ceb79fef03 329 \param [in] topOfMainStack Main Stack Pointer value to set
kevman 0:38ceb79fef03 330 */
kevman 0:38ceb79fef03 331 __STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
kevman 0:38ceb79fef03 332 {
kevman 0:38ceb79fef03 333 __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
kevman 0:38ceb79fef03 334 }
kevman 0:38ceb79fef03 335
kevman 0:38ceb79fef03 336
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Main Stack Pointer (non-secure)
  \details Writes the given value to the non-secure Main Stack Pointer (MSP) while in secure state.
  \param [in] topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
    __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif
kevman 0:38ceb79fef03 348
kevman 0:38ceb79fef03 349
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Stack Pointer (non-secure)
  \details Reads the non-secure Stack Pointer (SP) while in secure state.
  \return  SP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
    uint32_t sp;

    __ASM volatile ("MRS %0, sp_ns" : "=r" (sp) );
    return sp;
}


/**
  \brief   Set Stack Pointer (non-secure)
  \details Writes the given value to the non-secure Stack Pointer (SP) while in secure state.
  \param [in] topOfStack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
    __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif
kevman 0:38ceb79fef03 375
kevman 0:38ceb79fef03 376
kevman 0:38ceb79fef03 377 /**
kevman 0:38ceb79fef03 378 \brief Get Priority Mask
kevman 0:38ceb79fef03 379 \details Returns the current state of the priority mask bit from the Priority Mask Register.
kevman 0:38ceb79fef03 380 \return Priority Mask value
kevman 0:38ceb79fef03 381 */
kevman 0:38ceb79fef03 382 __STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
kevman 0:38ceb79fef03 383 {
kevman 0:38ceb79fef03 384 uint32_t result;
kevman 0:38ceb79fef03 385
kevman 0:38ceb79fef03 386 __ASM volatile ("MRS %0, primask" : "=r" (result) :: "memory");
kevman 0:38ceb79fef03 387 return(result);
kevman 0:38ceb79fef03 388 }
kevman 0:38ceb79fef03 389
kevman 0:38ceb79fef03 390
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Priority Mask (non-secure)
  \details Reads the non-secure PRIMASK register while in secure state.
  \return  Priority Mask value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
    uint32_t mask;

    __ASM volatile ("MRS %0, primask_ns" : "=r" (mask) : : "memory");
    return mask;
}
#endif
kevman 0:38ceb79fef03 405
kevman 0:38ceb79fef03 406
kevman 0:38ceb79fef03 407 /**
kevman 0:38ceb79fef03 408 \brief Set Priority Mask
kevman 0:38ceb79fef03 409 \details Assigns the given value to the Priority Mask Register.
kevman 0:38ceb79fef03 410 \param [in] priMask Priority Mask
kevman 0:38ceb79fef03 411 */
kevman 0:38ceb79fef03 412 __STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
kevman 0:38ceb79fef03 413 {
kevman 0:38ceb79fef03 414 __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
kevman 0:38ceb79fef03 415 }
kevman 0:38ceb79fef03 416
kevman 0:38ceb79fef03 417
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Priority Mask (non-secure)
  \details Writes the given value to the non-secure PRIMASK register while in secure state.
  \param [in] priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
    __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif
kevman 0:38ceb79fef03 429
kevman 0:38ceb79fef03 430
kevman 0:38ceb79fef03 431 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
kevman 0:38ceb79fef03 432 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
kevman 0:38ceb79fef03 433 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
kevman 0:38ceb79fef03 434 /**
kevman 0:38ceb79fef03 435 \brief Enable FIQ
kevman 0:38ceb79fef03 436 \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
kevman 0:38ceb79fef03 437 Can only be executed in Privileged modes.
kevman 0:38ceb79fef03 438 */
kevman 0:38ceb79fef03 439 __STATIC_FORCEINLINE void __enable_fault_irq(void)
kevman 0:38ceb79fef03 440 {
kevman 0:38ceb79fef03 441 __ASM volatile ("cpsie f" : : : "memory");
kevman 0:38ceb79fef03 442 }
kevman 0:38ceb79fef03 443
kevman 0:38ceb79fef03 444
kevman 0:38ceb79fef03 445 /**
kevman 0:38ceb79fef03 446 \brief Disable FIQ
kevman 0:38ceb79fef03 447 \details Disables FIQ interrupts by setting the F-bit in the CPSR.
kevman 0:38ceb79fef03 448 Can only be executed in Privileged modes.
kevman 0:38ceb79fef03 449 */
kevman 0:38ceb79fef03 450 __STATIC_FORCEINLINE void __disable_fault_irq(void)
kevman 0:38ceb79fef03 451 {
kevman 0:38ceb79fef03 452 __ASM volatile ("cpsid f" : : : "memory");
kevman 0:38ceb79fef03 453 }
kevman 0:38ceb79fef03 454
kevman 0:38ceb79fef03 455
kevman 0:38ceb79fef03 456 /**
kevman 0:38ceb79fef03 457 \brief Get Base Priority
kevman 0:38ceb79fef03 458 \details Returns the current value of the Base Priority register.
kevman 0:38ceb79fef03 459 \return Base Priority register value
kevman 0:38ceb79fef03 460 */
kevman 0:38ceb79fef03 461 __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
kevman 0:38ceb79fef03 462 {
kevman 0:38ceb79fef03 463 uint32_t result;
kevman 0:38ceb79fef03 464
kevman 0:38ceb79fef03 465 __ASM volatile ("MRS %0, basepri" : "=r" (result) );
kevman 0:38ceb79fef03 466 return(result);
kevman 0:38ceb79fef03 467 }
kevman 0:38ceb79fef03 468
kevman 0:38ceb79fef03 469
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Base Priority (non-secure)
  \details Reads the non-secure BASEPRI register while in secure state.
  \return  Base Priority register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
    uint32_t basePri;

    __ASM volatile ("MRS %0, basepri_ns" : "=r" (basePri) );
    return basePri;
}
#endif
kevman 0:38ceb79fef03 484
kevman 0:38ceb79fef03 485
kevman 0:38ceb79fef03 486 /**
kevman 0:38ceb79fef03 487 \brief Set Base Priority
kevman 0:38ceb79fef03 488 \details Assigns the given value to the Base Priority register.
kevman 0:38ceb79fef03 489 \param [in] basePri Base Priority value to set
kevman 0:38ceb79fef03 490 */
kevman 0:38ceb79fef03 491 __STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
kevman 0:38ceb79fef03 492 {
kevman 0:38ceb79fef03 493 __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
kevman 0:38ceb79fef03 494 }
kevman 0:38ceb79fef03 495
kevman 0:38ceb79fef03 496
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Base Priority (non-secure)
  \details Writes the given value to the non-secure BASEPRI register while in secure state.
  \param [in] basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
    __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif
kevman 0:38ceb79fef03 508
kevman 0:38ceb79fef03 509
kevman 0:38ceb79fef03 510 /**
kevman 0:38ceb79fef03 511 \brief Set Base Priority with condition
kevman 0:38ceb79fef03 512 \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
kevman 0:38ceb79fef03 513 or the new value increases the BASEPRI priority level.
kevman 0:38ceb79fef03 514 \param [in] basePri Base Priority value to set
kevman 0:38ceb79fef03 515 */
kevman 0:38ceb79fef03 516 __STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
kevman 0:38ceb79fef03 517 {
kevman 0:38ceb79fef03 518 __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
kevman 0:38ceb79fef03 519 }
kevman 0:38ceb79fef03 520
kevman 0:38ceb79fef03 521
kevman 0:38ceb79fef03 522 /**
kevman 0:38ceb79fef03 523 \brief Get Fault Mask
kevman 0:38ceb79fef03 524 \details Returns the current value of the Fault Mask register.
kevman 0:38ceb79fef03 525 \return Fault Mask register value
kevman 0:38ceb79fef03 526 */
kevman 0:38ceb79fef03 527 __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
kevman 0:38ceb79fef03 528 {
kevman 0:38ceb79fef03 529 uint32_t result;
kevman 0:38ceb79fef03 530
kevman 0:38ceb79fef03 531 __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
kevman 0:38ceb79fef03 532 return(result);
kevman 0:38ceb79fef03 533 }
kevman 0:38ceb79fef03 534
kevman 0:38ceb79fef03 535
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Fault Mask (non-secure)
  \details Reads the non-secure FAULTMASK register while in secure state.
  \return  Fault Mask register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
    uint32_t mask;

    __ASM volatile ("MRS %0, faultmask_ns" : "=r" (mask) );
    return mask;
}
#endif
kevman 0:38ceb79fef03 550
kevman 0:38ceb79fef03 551
kevman 0:38ceb79fef03 552 /**
kevman 0:38ceb79fef03 553 \brief Set Fault Mask
kevman 0:38ceb79fef03 554 \details Assigns the given value to the Fault Mask register.
kevman 0:38ceb79fef03 555 \param [in] faultMask Fault Mask value to set
kevman 0:38ceb79fef03 556 */
kevman 0:38ceb79fef03 557 __STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
kevman 0:38ceb79fef03 558 {
kevman 0:38ceb79fef03 559 __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
kevman 0:38ceb79fef03 560 }
kevman 0:38ceb79fef03 561
kevman 0:38ceb79fef03 562
kevman 0:38ceb79fef03 563 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
kevman 0:38ceb79fef03 564 /**
kevman 0:38ceb79fef03 565 \brief Set Fault Mask (non-secure)
kevman 0:38ceb79fef03 566 \details Assigns the given value to the non-secure Fault Mask register when in secure state.
kevman 0:38ceb79fef03 567 \param [in] faultMask Fault Mask value to set
kevman 0:38ceb79fef03 568 */
kevman 0:38ceb79fef03 569 __STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
kevman 0:38ceb79fef03 570 {
kevman 0:38ceb79fef03 571 __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
kevman 0:38ceb79fef03 572 }
kevman 0:38ceb79fef03 573 #endif
kevman 0:38ceb79fef03 574
kevman 0:38ceb79fef03 575 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
kevman 0:38ceb79fef03 576 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
kevman 0:38ceb79fef03 577 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
kevman 0:38ceb79fef03 578
kevman 0:38ceb79fef03 579
kevman 0:38ceb79fef03 580 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
kevman 0:38ceb79fef03 581 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
kevman 0:38ceb79fef03 582
kevman 0:38ceb79fef03 583 /**
kevman 0:38ceb79fef03 584 \brief Get Process Stack Pointer Limit
kevman 0:38ceb79fef03 585 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
kevman 0:38ceb79fef03 586 Stack Pointer Limit register hence zero is returned always in non-secure
kevman 0:38ceb79fef03 587 mode.
kevman 0:38ceb79fef03 588
kevman 0:38ceb79fef03 589 \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
kevman 0:38ceb79fef03 590 \return PSPLIM Register value
kevman 0:38ceb79fef03 591 */
kevman 0:38ceb79fef03 592 __STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
kevman 0:38ceb79fef03 593 {
kevman 0:38ceb79fef03 594 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
kevman 0:38ceb79fef03 595 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
kevman 0:38ceb79fef03 596 // without main extensions, the non-secure PSPLIM is RAZ/WI
kevman 0:38ceb79fef03 597 return 0U;
kevman 0:38ceb79fef03 598 #else
kevman 0:38ceb79fef03 599 uint32_t result;
kevman 0:38ceb79fef03 600 __ASM volatile ("MRS %0, psplim" : "=r" (result) );
kevman 0:38ceb79fef03 601 return result;
kevman 0:38ceb79fef03 602 #endif
kevman 0:38ceb79fef03 603 }
kevman 0:38ceb79fef03 604
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register, hence zero is always returned.

  \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \return  PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
    /* without main extensions, the non-secure PSPLIM is RAZ/WI */
    return 0U;
#else
    uint32_t limit;
    __ASM volatile ("MRS %0, psplim_ns" : "=r" (limit) );
    return limit;
#endif
}
#endif
kevman 0:38ceb79fef03 626
kevman 0:38ceb79fef03 627
kevman 0:38ceb79fef03 628 /**
kevman 0:38ceb79fef03 629 \brief Set Process Stack Pointer Limit
kevman 0:38ceb79fef03 630 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
kevman 0:38ceb79fef03 631 Stack Pointer Limit register hence the write is silently ignored in non-secure
kevman 0:38ceb79fef03 632 mode.
kevman 0:38ceb79fef03 633
kevman 0:38ceb79fef03 634 \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
kevman 0:38ceb79fef03 635 \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
kevman 0:38ceb79fef03 636 */
kevman 0:38ceb79fef03 637 __STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
kevman 0:38ceb79fef03 638 {
kevman 0:38ceb79fef03 639 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
kevman 0:38ceb79fef03 640 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
kevman 0:38ceb79fef03 641 // without main extensions, the non-secure PSPLIM is RAZ/WI
kevman 0:38ceb79fef03 642 (void)ProcStackPtrLimit;
kevman 0:38ceb79fef03 643 #else
kevman 0:38ceb79fef03 644 __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
kevman 0:38ceb79fef03 645 #endif
kevman 0:38ceb79fef03 646 }
kevman 0:38ceb79fef03 647
kevman 0:38ceb79fef03 648
kevman 0:38ceb79fef03 649 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
kevman 0:38ceb79fef03 650 /**
kevman 0:38ceb79fef03 651 \brief Set Process Stack Pointer (non-secure)
kevman 0:38ceb79fef03 652 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
kevman 0:38ceb79fef03 653 Stack Pointer Limit register hence the write is silently ignored.
kevman 0:38ceb79fef03 654
kevman 0:38ceb79fef03 655 \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
kevman 0:38ceb79fef03 656 \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
kevman 0:38ceb79fef03 657 */
kevman 0:38ceb79fef03 658 __STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
kevman 0:38ceb79fef03 659 {
kevman 0:38ceb79fef03 660 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
kevman 0:38ceb79fef03 661 // without main extensions, the non-secure PSPLIM is RAZ/WI
kevman 0:38ceb79fef03 662 (void)ProcStackPtrLimit;
kevman 0:38ceb79fef03 663 #else
kevman 0:38ceb79fef03 664 __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
kevman 0:38ceb79fef03 665 #endif
kevman 0:38ceb79fef03 666 }
kevman 0:38ceb79fef03 667 #endif
kevman 0:38ceb79fef03 668
kevman 0:38ceb79fef03 669
kevman 0:38ceb79fef03 670 /**
kevman 0:38ceb79fef03 671 \brief Get Main Stack Pointer Limit
kevman 0:38ceb79fef03 672 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
kevman 0:38ceb79fef03 673 Stack Pointer Limit register hence zero is returned always in non-secure
kevman 0:38ceb79fef03 674 mode.
kevman 0:38ceb79fef03 675
kevman 0:38ceb79fef03 676 \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
kevman 0:38ceb79fef03 677 \return MSPLIM Register value
kevman 0:38ceb79fef03 678 */
kevman 0:38ceb79fef03 679 __STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
kevman 0:38ceb79fef03 680 {
kevman 0:38ceb79fef03 681 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
kevman 0:38ceb79fef03 682 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
kevman 0:38ceb79fef03 683 // without main extensions, the non-secure MSPLIM is RAZ/WI
kevman 0:38ceb79fef03 684 return 0U;
kevman 0:38ceb79fef03 685 #else
kevman 0:38ceb79fef03 686 uint32_t result;
kevman 0:38ceb79fef03 687 __ASM volatile ("MRS %0, msplim" : "=r" (result) );
kevman 0:38ceb79fef03 688 return result;
kevman 0:38ceb79fef03 689 #endif
kevman 0:38ceb79fef03 690 }
kevman 0:38ceb79fef03 691
kevman 0:38ceb79fef03 692
kevman 0:38ceb79fef03 693 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
kevman 0:38ceb79fef03 694 /**
kevman 0:38ceb79fef03 695 \brief Get Main Stack Pointer Limit (non-secure)
kevman 0:38ceb79fef03 696 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
kevman 0:38ceb79fef03 697 Stack Pointer Limit register hence zero is returned always.
kevman 0:38ceb79fef03 698
kevman 0:38ceb79fef03 699 \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state.
kevman 0:38ceb79fef03 700 \return MSPLIM Register value
kevman 0:38ceb79fef03 701 */
kevman 0:38ceb79fef03 702 __STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
kevman 0:38ceb79fef03 703 {
kevman 0:38ceb79fef03 704 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
kevman 0:38ceb79fef03 705 // without main extensions, the non-secure MSPLIM is RAZ/WI
kevman 0:38ceb79fef03 706 return 0U;
kevman 0:38ceb79fef03 707 #else
kevman 0:38ceb79fef03 708 uint32_t result;
kevman 0:38ceb79fef03 709 __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
kevman 0:38ceb79fef03 710 return result;
kevman 0:38ceb79fef03 711 #endif
kevman 0:38ceb79fef03 712 }
kevman 0:38ceb79fef03 713 #endif
kevman 0:38ceb79fef03 714
kevman 0:38ceb79fef03 715
kevman 0:38ceb79fef03 716 /**
kevman 0:38ceb79fef03 717 \brief Set Main Stack Pointer Limit
kevman 0:38ceb79fef03 718 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
kevman 0:38ceb79fef03 719 Stack Pointer Limit register hence the write is silently ignored in non-secure
kevman 0:38ceb79fef03 720 mode.
kevman 0:38ceb79fef03 721
kevman 0:38ceb79fef03 722 \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
kevman 0:38ceb79fef03 723 \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set
kevman 0:38ceb79fef03 724 */
kevman 0:38ceb79fef03 725 __STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
kevman 0:38ceb79fef03 726 {
kevman 0:38ceb79fef03 727 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
kevman 0:38ceb79fef03 728 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
kevman 0:38ceb79fef03 729 // without main extensions, the non-secure MSPLIM is RAZ/WI
kevman 0:38ceb79fef03 730 (void)MainStackPtrLimit;
kevman 0:38ceb79fef03 731 #else
kevman 0:38ceb79fef03 732 __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
kevman 0:38ceb79fef03 733 #endif
kevman 0:38ceb79fef03 734 }
kevman 0:38ceb79fef03 735
kevman 0:38ceb79fef03 736
kevman 0:38ceb79fef03 737 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
kevman 0:38ceb79fef03 738 /**
kevman 0:38ceb79fef03 739 \brief Set Main Stack Pointer Limit (non-secure)
kevman 0:38ceb79fef03 740 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
kevman 0:38ceb79fef03 741 Stack Pointer Limit register hence the write is silently ignored.
kevman 0:38ceb79fef03 742
kevman 0:38ceb79fef03 743 \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
kevman 0:38ceb79fef03 744 \param [in] MainStackPtrLimit Main Stack Pointer value to set
kevman 0:38ceb79fef03 745 */
kevman 0:38ceb79fef03 746 __STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
kevman 0:38ceb79fef03 747 {
kevman 0:38ceb79fef03 748 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
kevman 0:38ceb79fef03 749 // without main extensions, the non-secure MSPLIM is RAZ/WI
kevman 0:38ceb79fef03 750 (void)MainStackPtrLimit;
kevman 0:38ceb79fef03 751 #else
kevman 0:38ceb79fef03 752 __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
kevman 0:38ceb79fef03 753 #endif
kevman 0:38ceb79fef03 754 }
kevman 0:38ceb79fef03 755 #endif
kevman 0:38ceb79fef03 756
kevman 0:38ceb79fef03 757 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
kevman 0:38ceb79fef03 758 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
kevman 0:38ceb79fef03 759
kevman 0:38ceb79fef03 760
kevman 0:38ceb79fef03 761 /**
kevman 0:38ceb79fef03 762 \brief Get FPSCR
kevman 0:38ceb79fef03 763 \details Returns the current value of the Floating Point Status/Control register.
kevman 0:38ceb79fef03 764 \return Floating Point Status/Control register value
kevman 0:38ceb79fef03 765 */
kevman 0:38ceb79fef03 766 __STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
kevman 0:38ceb79fef03 767 {
kevman 0:38ceb79fef03 768 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
kevman 0:38ceb79fef03 769 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
kevman 0:38ceb79fef03 770 #if __has_builtin(__builtin_arm_get_fpscr)
kevman 0:38ceb79fef03 771 // Re-enable using built-in when GCC has been fixed
kevman 0:38ceb79fef03 772 // || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
kevman 0:38ceb79fef03 773 /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
kevman 0:38ceb79fef03 774 return __builtin_arm_get_fpscr();
kevman 0:38ceb79fef03 775 #else
kevman 0:38ceb79fef03 776 uint32_t result;
kevman 0:38ceb79fef03 777
kevman 0:38ceb79fef03 778 __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
kevman 0:38ceb79fef03 779 return(result);
kevman 0:38ceb79fef03 780 #endif
kevman 0:38ceb79fef03 781 #else
kevman 0:38ceb79fef03 782 return(0U);
kevman 0:38ceb79fef03 783 #endif
kevman 0:38ceb79fef03 784 }
kevman 0:38ceb79fef03 785
kevman 0:38ceb79fef03 786
kevman 0:38ceb79fef03 787 /**
kevman 0:38ceb79fef03 788 \brief Set FPSCR
kevman 0:38ceb79fef03 789 \details Assigns the given value to the Floating Point Status/Control register.
kevman 0:38ceb79fef03 790 \param [in] fpscr Floating Point Status/Control value to set
kevman 0:38ceb79fef03 791 */
kevman 0:38ceb79fef03 792 __STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
kevman 0:38ceb79fef03 793 {
kevman 0:38ceb79fef03 794 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
kevman 0:38ceb79fef03 795 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
kevman 0:38ceb79fef03 796 #if __has_builtin(__builtin_arm_set_fpscr)
kevman 0:38ceb79fef03 797 // Re-enable using built-in when GCC has been fixed
kevman 0:38ceb79fef03 798 // || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
kevman 0:38ceb79fef03 799 /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
kevman 0:38ceb79fef03 800 __builtin_arm_set_fpscr(fpscr);
kevman 0:38ceb79fef03 801 #else
kevman 0:38ceb79fef03 802 __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
kevman 0:38ceb79fef03 803 #endif
kevman 0:38ceb79fef03 804 #else
kevman 0:38ceb79fef03 805 (void)fpscr;
kevman 0:38ceb79fef03 806 #endif
kevman 0:38ceb79fef03 807 }
kevman 0:38ceb79fef03 808
kevman 0:38ceb79fef03 809
kevman 0:38ceb79fef03 810 /*@} end of CMSIS_Core_RegAccFunctions */
kevman 0:38ceb79fef03 811
kevman 0:38ceb79fef03 812
kevman 0:38ceb79fef03 813 /* ########################## Core Instruction Access ######################### */
kevman 0:38ceb79fef03 814 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
kevman 0:38ceb79fef03 815 Access to dedicated instructions
kevman 0:38ceb79fef03 816 @{
kevman 0:38ceb79fef03 817 */
kevman 0:38ceb79fef03 818
kevman 0:38ceb79fef03 819 /* Define macros for porting to both thumb1 and thumb2.
kevman 0:38ceb79fef03 820 * For thumb1, use low register (r0-r7), specified by constraint "l"
kevman 0:38ceb79fef03 821 * Otherwise, use general registers, specified by constraint "r" */
kevman 0:38ceb79fef03 822 #if defined (__thumb__) && !defined (__thumb2__)
kevman 0:38ceb79fef03 823 #define __CMSIS_GCC_OUT_REG(r) "=l" (r)
kevman 0:38ceb79fef03 824 #define __CMSIS_GCC_RW_REG(r) "+l" (r)
kevman 0:38ceb79fef03 825 #define __CMSIS_GCC_USE_REG(r) "l" (r)
kevman 0:38ceb79fef03 826 #else
kevman 0:38ceb79fef03 827 #define __CMSIS_GCC_OUT_REG(r) "=r" (r)
kevman 0:38ceb79fef03 828 #define __CMSIS_GCC_RW_REG(r) "+r" (r)
kevman 0:38ceb79fef03 829 #define __CMSIS_GCC_USE_REG(r) "r" (r)
kevman 0:38ceb79fef03 830 #endif
kevman 0:38ceb79fef03 831
kevman 0:38ceb79fef03 832 /**
kevman 0:38ceb79fef03 833 \brief No Operation
kevman 0:38ceb79fef03 834 \details No Operation does nothing. This instruction can be used for code alignment purposes.
kevman 0:38ceb79fef03 835 */
kevman 0:38ceb79fef03 836 #define __NOP() __ASM volatile ("nop")
kevman 0:38ceb79fef03 837
kevman 0:38ceb79fef03 838 /**
kevman 0:38ceb79fef03 839 \brief Wait For Interrupt
kevman 0:38ceb79fef03 840 \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
kevman 0:38ceb79fef03 841 */
kevman 0:38ceb79fef03 842 #define __WFI() __ASM volatile ("wfi")
kevman 0:38ceb79fef03 843
kevman 0:38ceb79fef03 844
kevman 0:38ceb79fef03 845 /**
kevman 0:38ceb79fef03 846 \brief Wait For Event
kevman 0:38ceb79fef03 847 \details Wait For Event is a hint instruction that permits the processor to enter
kevman 0:38ceb79fef03 848 a low-power state until one of a number of events occurs.
kevman 0:38ceb79fef03 849 */
kevman 0:38ceb79fef03 850 #define __WFE() __ASM volatile ("wfe")
kevman 0:38ceb79fef03 851
kevman 0:38ceb79fef03 852
kevman 0:38ceb79fef03 853 /**
kevman 0:38ceb79fef03 854 \brief Send Event
kevman 0:38ceb79fef03 855 \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
kevman 0:38ceb79fef03 856 */
kevman 0:38ceb79fef03 857 #define __SEV() __ASM volatile ("sev")
kevman 0:38ceb79fef03 858
kevman 0:38ceb79fef03 859
kevman 0:38ceb79fef03 860 /**
kevman 0:38ceb79fef03 861 \brief Instruction Synchronization Barrier
kevman 0:38ceb79fef03 862 \details Instruction Synchronization Barrier flushes the pipeline in the processor,
kevman 0:38ceb79fef03 863 so that all instructions following the ISB are fetched from cache or memory,
kevman 0:38ceb79fef03 864 after the instruction has been completed.
kevman 0:38ceb79fef03 865 */
kevman 0:38ceb79fef03 866 __STATIC_FORCEINLINE void __ISB(void)
kevman 0:38ceb79fef03 867 {
kevman 0:38ceb79fef03 868 __ASM volatile ("isb 0xF":::"memory");
kevman 0:38ceb79fef03 869 }
kevman 0:38ceb79fef03 870
kevman 0:38ceb79fef03 871
kevman 0:38ceb79fef03 872 /**
kevman 0:38ceb79fef03 873 \brief Data Synchronization Barrier
kevman 0:38ceb79fef03 874 \details Acts as a special kind of Data Memory Barrier.
kevman 0:38ceb79fef03 875 It completes when all explicit memory accesses before this instruction complete.
kevman 0:38ceb79fef03 876 */
kevman 0:38ceb79fef03 877 __STATIC_FORCEINLINE void __DSB(void)
kevman 0:38ceb79fef03 878 {
kevman 0:38ceb79fef03 879 __ASM volatile ("dsb 0xF":::"memory");
kevman 0:38ceb79fef03 880 }
kevman 0:38ceb79fef03 881
kevman 0:38ceb79fef03 882
kevman 0:38ceb79fef03 883 /**
kevman 0:38ceb79fef03 884 \brief Data Memory Barrier
kevman 0:38ceb79fef03 885 \details Ensures the apparent order of the explicit memory operations before
kevman 0:38ceb79fef03 886 and after the instruction, without ensuring their completion.
kevman 0:38ceb79fef03 887 */
kevman 0:38ceb79fef03 888 __STATIC_FORCEINLINE void __DMB(void)
kevman 0:38ceb79fef03 889 {
kevman 0:38ceb79fef03 890 __ASM volatile ("dmb 0xF":::"memory");
kevman 0:38ceb79fef03 891 }
kevman 0:38ceb79fef03 892
kevman 0:38ceb79fef03 893
kevman 0:38ceb79fef03 894 /**
kevman 0:38ceb79fef03 895 \brief Reverse byte order (32 bit)
kevman 0:38ceb79fef03 896 \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
kevman 0:38ceb79fef03 897 \param [in] value Value to reverse
kevman 0:38ceb79fef03 898 \return Reversed value
kevman 0:38ceb79fef03 899 */
kevman 0:38ceb79fef03 900 __STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
kevman 0:38ceb79fef03 901 {
kevman 0:38ceb79fef03 902 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
kevman 0:38ceb79fef03 903 return __builtin_bswap32(value);
kevman 0:38ceb79fef03 904 #else
kevman 0:38ceb79fef03 905 uint32_t result;
kevman 0:38ceb79fef03 906
kevman 0:38ceb79fef03 907 __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
kevman 0:38ceb79fef03 908 return result;
kevman 0:38ceb79fef03 909 #endif
kevman 0:38ceb79fef03 910 }
kevman 0:38ceb79fef03 911
kevman 0:38ceb79fef03 912
kevman 0:38ceb79fef03 913 /**
kevman 0:38ceb79fef03 914 \brief Reverse byte order (16 bit)
kevman 0:38ceb79fef03 915 \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
kevman 0:38ceb79fef03 916 \param [in] value Value to reverse
kevman 0:38ceb79fef03 917 \return Reversed value
kevman 0:38ceb79fef03 918 */
kevman 0:38ceb79fef03 919 __STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
kevman 0:38ceb79fef03 920 {
kevman 0:38ceb79fef03 921 uint32_t result;
kevman 0:38ceb79fef03 922
kevman 0:38ceb79fef03 923 __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
kevman 0:38ceb79fef03 924 return result;
kevman 0:38ceb79fef03 925 }
kevman 0:38ceb79fef03 926
kevman 0:38ceb79fef03 927
kevman 0:38ceb79fef03 928 /**
kevman 0:38ceb79fef03 929 \brief Reverse byte order (16 bit)
kevman 0:38ceb79fef03 930 \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
kevman 0:38ceb79fef03 931 \param [in] value Value to reverse
kevman 0:38ceb79fef03 932 \return Reversed value
kevman 0:38ceb79fef03 933 */
kevman 0:38ceb79fef03 934 __STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
kevman 0:38ceb79fef03 935 {
kevman 0:38ceb79fef03 936 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
kevman 0:38ceb79fef03 937 return (int16_t)__builtin_bswap16(value);
kevman 0:38ceb79fef03 938 #else
kevman 0:38ceb79fef03 939 int16_t result;
kevman 0:38ceb79fef03 940
kevman 0:38ceb79fef03 941 __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
kevman 0:38ceb79fef03 942 return result;
kevman 0:38ceb79fef03 943 #endif
kevman 0:38ceb79fef03 944 }
kevman 0:38ceb79fef03 945
kevman 0:38ceb79fef03 946
kevman 0:38ceb79fef03 947 /**
kevman 0:38ceb79fef03 948 \brief Rotate Right in unsigned value (32 bit)
kevman 0:38ceb79fef03 949 \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
kevman 0:38ceb79fef03 950 \param [in] op1 Value to rotate
kevman 0:38ceb79fef03 951 \param [in] op2 Number of Bits to rotate
kevman 0:38ceb79fef03 952 \return Rotated value
kevman 0:38ceb79fef03 953 */
kevman 0:38ceb79fef03 954 __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
kevman 0:38ceb79fef03 955 {
kevman 0:38ceb79fef03 956 op2 %= 32U;
kevman 0:38ceb79fef03 957 if (op2 == 0U)
kevman 0:38ceb79fef03 958 {
kevman 0:38ceb79fef03 959 return op1;
kevman 0:38ceb79fef03 960 }
kevman 0:38ceb79fef03 961 return (op1 >> op2) | (op1 << (32U - op2));
kevman 0:38ceb79fef03 962 }
kevman 0:38ceb79fef03 963
kevman 0:38ceb79fef03 964
kevman 0:38ceb79fef03 965 /**
kevman 0:38ceb79fef03 966 \brief Breakpoint
kevman 0:38ceb79fef03 967 \details Causes the processor to enter Debug state.
kevman 0:38ceb79fef03 968 Debug tools can use this to investigate system state when the instruction at a particular address is reached.
kevman 0:38ceb79fef03 969 \param [in] value is ignored by the processor.
kevman 0:38ceb79fef03 970 If required, a debugger can use it to store additional information about the breakpoint.
kevman 0:38ceb79fef03 971 */
kevman 0:38ceb79fef03 972 #define __BKPT(value) __ASM volatile ("bkpt "#value)
kevman 0:38ceb79fef03 973
kevman 0:38ceb79fef03 974
kevman 0:38ceb79fef03 975 /**
kevman 0:38ceb79fef03 976 \brief Reverse bit order of value
kevman 0:38ceb79fef03 977 \details Reverses the bit order of the given value.
kevman 0:38ceb79fef03 978 \param [in] value Value to reverse
kevman 0:38ceb79fef03 979 \return Reversed value
kevman 0:38ceb79fef03 980 */
kevman 0:38ceb79fef03 981 __STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
kevman 0:38ceb79fef03 982 {
kevman 0:38ceb79fef03 983 uint32_t result;
kevman 0:38ceb79fef03 984
kevman 0:38ceb79fef03 985 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
kevman 0:38ceb79fef03 986 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
kevman 0:38ceb79fef03 987 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
kevman 0:38ceb79fef03 988 __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
kevman 0:38ceb79fef03 989 #else
kevman 0:38ceb79fef03 990 uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */
kevman 0:38ceb79fef03 991
kevman 0:38ceb79fef03 992 result = value; /* r will be reversed bits of v; first get LSB of v */
kevman 0:38ceb79fef03 993 for (value >>= 1U; value != 0U; value >>= 1U)
kevman 0:38ceb79fef03 994 {
kevman 0:38ceb79fef03 995 result <<= 1U;
kevman 0:38ceb79fef03 996 result |= value & 1U;
kevman 0:38ceb79fef03 997 s--;
kevman 0:38ceb79fef03 998 }
kevman 0:38ceb79fef03 999 result <<= s; /* shift when v's highest bits are zero */
kevman 0:38ceb79fef03 1000 #endif
kevman 0:38ceb79fef03 1001 return result;
kevman 0:38ceb79fef03 1002 }
kevman 0:38ceb79fef03 1003
kevman 0:38ceb79fef03 1004
kevman 0:38ceb79fef03 1005 /**
kevman 0:38ceb79fef03 1006 \brief Count leading zeros
kevman 0:38ceb79fef03 1007 \details Counts the number of leading zeros of a data value.
kevman 0:38ceb79fef03 1008 \param [in] value Value to count the leading zeros
kevman 0:38ceb79fef03 1009 \return number of leading zeros in value
kevman 0:38ceb79fef03 1010 */
kevman 0:38ceb79fef03 1011 #define __CLZ (uint8_t)__builtin_clz
kevman 0:38ceb79fef03 1012
kevman 0:38ceb79fef03 1013
kevman 0:38ceb79fef03 1014 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
kevman 0:38ceb79fef03 1015 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
kevman 0:38ceb79fef03 1016 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
kevman 0:38ceb79fef03 1017 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
kevman 0:38ceb79fef03 1018 /**
kevman 0:38ceb79fef03 1019 \brief LDR Exclusive (8 bit)
kevman 0:38ceb79fef03 1020 \details Executes a exclusive LDR instruction for 8 bit value.
kevman 0:38ceb79fef03 1021 \param [in] ptr Pointer to data
kevman 0:38ceb79fef03 1022 \return value of type uint8_t at (*ptr)
kevman 0:38ceb79fef03 1023 */
kevman 0:38ceb79fef03 1024 __STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
kevman 0:38ceb79fef03 1025 {
kevman 0:38ceb79fef03 1026 uint32_t result;
kevman 0:38ceb79fef03 1027
kevman 0:38ceb79fef03 1028 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
kevman 0:38ceb79fef03 1029 __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
kevman 0:38ceb79fef03 1030 #else
kevman 0:38ceb79fef03 1031 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
kevman 0:38ceb79fef03 1032 accepted by assembler. So has to use following less efficient pattern.
kevman 0:38ceb79fef03 1033 */
kevman 0:38ceb79fef03 1034 __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
kevman 0:38ceb79fef03 1035 #endif
kevman 0:38ceb79fef03 1036 return ((uint8_t) result); /* Add explicit type cast here */
kevman 0:38ceb79fef03 1037 }
kevman 0:38ceb79fef03 1038
kevman 0:38ceb79fef03 1039
kevman 0:38ceb79fef03 1040 /**
kevman 0:38ceb79fef03 1041 \brief LDR Exclusive (16 bit)
kevman 0:38ceb79fef03 1042 \details Executes a exclusive LDR instruction for 16 bit values.
kevman 0:38ceb79fef03 1043 \param [in] ptr Pointer to data
kevman 0:38ceb79fef03 1044 \return value of type uint16_t at (*ptr)
kevman 0:38ceb79fef03 1045 */
kevman 0:38ceb79fef03 1046 __STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
kevman 0:38ceb79fef03 1047 {
kevman 0:38ceb79fef03 1048 uint32_t result;
kevman 0:38ceb79fef03 1049
kevman 0:38ceb79fef03 1050 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
kevman 0:38ceb79fef03 1051 __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
kevman 0:38ceb79fef03 1052 #else
kevman 0:38ceb79fef03 1053 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
kevman 0:38ceb79fef03 1054 accepted by assembler. So has to use following less efficient pattern.
kevman 0:38ceb79fef03 1055 */
kevman 0:38ceb79fef03 1056 __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
kevman 0:38ceb79fef03 1057 #endif
kevman 0:38ceb79fef03 1058 return ((uint16_t) result); /* Add explicit type cast here */
kevman 0:38ceb79fef03 1059 }
kevman 0:38ceb79fef03 1060
kevman 0:38ceb79fef03 1061
kevman 0:38ceb79fef03 1062 /**
kevman 0:38ceb79fef03 1063 \brief LDR Exclusive (32 bit)
kevman 0:38ceb79fef03 1064 \details Executes a exclusive LDR instruction for 32 bit values.
kevman 0:38ceb79fef03 1065 \param [in] ptr Pointer to data
kevman 0:38ceb79fef03 1066 \return value of type uint32_t at (*ptr)
kevman 0:38ceb79fef03 1067 */
kevman 0:38ceb79fef03 1068 __STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
kevman 0:38ceb79fef03 1069 {
kevman 0:38ceb79fef03 1070 uint32_t result;
kevman 0:38ceb79fef03 1071
kevman 0:38ceb79fef03 1072 __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
kevman 0:38ceb79fef03 1073 return(result);
kevman 0:38ceb79fef03 1074 }
kevman 0:38ceb79fef03 1075
kevman 0:38ceb79fef03 1076
kevman 0:38ceb79fef03 1077 /**
kevman 0:38ceb79fef03 1078 \brief STR Exclusive (8 bit)
kevman 0:38ceb79fef03 1079 \details Executes a exclusive STR instruction for 8 bit values.
kevman 0:38ceb79fef03 1080 \param [in] value Value to store
kevman 0:38ceb79fef03 1081 \param [in] ptr Pointer to location
kevman 0:38ceb79fef03 1082 \return 0 Function succeeded
kevman 0:38ceb79fef03 1083 \return 1 Function failed
kevman 0:38ceb79fef03 1084 */
kevman 0:38ceb79fef03 1085 __STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
kevman 0:38ceb79fef03 1086 {
kevman 0:38ceb79fef03 1087 uint32_t result;
kevman 0:38ceb79fef03 1088
kevman 0:38ceb79fef03 1089 __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
kevman 0:38ceb79fef03 1090 return(result);
kevman 0:38ceb79fef03 1091 }
kevman 0:38ceb79fef03 1092
kevman 0:38ceb79fef03 1093
kevman 0:38ceb79fef03 1094 /**
kevman 0:38ceb79fef03 1095 \brief STR Exclusive (16 bit)
kevman 0:38ceb79fef03 1096 \details Executes a exclusive STR instruction for 16 bit values.
kevman 0:38ceb79fef03 1097 \param [in] value Value to store
kevman 0:38ceb79fef03 1098 \param [in] ptr Pointer to location
kevman 0:38ceb79fef03 1099 \return 0 Function succeeded
kevman 0:38ceb79fef03 1100 \return 1 Function failed
kevman 0:38ceb79fef03 1101 */
kevman 0:38ceb79fef03 1102 __STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
kevman 0:38ceb79fef03 1103 {
kevman 0:38ceb79fef03 1104 uint32_t result;
kevman 0:38ceb79fef03 1105
kevman 0:38ceb79fef03 1106 __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
kevman 0:38ceb79fef03 1107 return(result);
kevman 0:38ceb79fef03 1108 }
kevman 0:38ceb79fef03 1109
kevman 0:38ceb79fef03 1110
kevman 0:38ceb79fef03 1111 /**
kevman 0:38ceb79fef03 1112 \brief STR Exclusive (32 bit)
kevman 0:38ceb79fef03 1113 \details Executes a exclusive STR instruction for 32 bit values.
kevman 0:38ceb79fef03 1114 \param [in] value Value to store
kevman 0:38ceb79fef03 1115 \param [in] ptr Pointer to location
kevman 0:38ceb79fef03 1116 \return 0 Function succeeded
kevman 0:38ceb79fef03 1117 \return 1 Function failed
kevman 0:38ceb79fef03 1118 */
kevman 0:38ceb79fef03 1119 __STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
kevman 0:38ceb79fef03 1120 {
kevman 0:38ceb79fef03 1121 uint32_t result;
kevman 0:38ceb79fef03 1122
kevman 0:38ceb79fef03 1123 __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
kevman 0:38ceb79fef03 1124 return(result);
kevman 0:38ceb79fef03 1125 }
kevman 0:38ceb79fef03 1126
kevman 0:38ceb79fef03 1127
kevman 0:38ceb79fef03 1128 /**
kevman 0:38ceb79fef03 1129 \brief Remove the exclusive lock
kevman 0:38ceb79fef03 1130 \details Removes the exclusive lock which is created by LDREX.
kevman 0:38ceb79fef03 1131 */
kevman 0:38ceb79fef03 1132 __STATIC_FORCEINLINE void __CLREX(void)
kevman 0:38ceb79fef03 1133 {
kevman 0:38ceb79fef03 1134 __ASM volatile ("clrex" ::: "memory");
kevman 0:38ceb79fef03 1135 }
kevman 0:38ceb79fef03 1136
kevman 0:38ceb79fef03 1137 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
kevman 0:38ceb79fef03 1138 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
kevman 0:38ceb79fef03 1139 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
kevman 0:38ceb79fef03 1140 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
kevman 0:38ceb79fef03 1141
kevman 0:38ceb79fef03 1142
kevman 0:38ceb79fef03 1143 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
kevman 0:38ceb79fef03 1144 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
kevman 0:38ceb79fef03 1145 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
kevman 0:38ceb79fef03 1146 /**
kevman 0:38ceb79fef03 1147 \brief Signed Saturate
kevman 0:38ceb79fef03 1148 \details Saturates a signed value.
kevman 0:38ceb79fef03 1149 \param [in] ARG1 Value to be saturated
kevman 0:38ceb79fef03 1150 \param [in] ARG2 Bit position to saturate to (1..32)
kevman 0:38ceb79fef03 1151 \return Saturated value
kevman 0:38ceb79fef03 1152 */
kevman 0:38ceb79fef03 1153 #define __SSAT(ARG1,ARG2) \
kevman 0:38ceb79fef03 1154 __extension__ \
kevman 0:38ceb79fef03 1155 ({ \
kevman 0:38ceb79fef03 1156 int32_t __RES, __ARG1 = (ARG1); \
kevman 0:38ceb79fef03 1157 __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
kevman 0:38ceb79fef03 1158 __RES; \
kevman 0:38ceb79fef03 1159 })
kevman 0:38ceb79fef03 1160
kevman 0:38ceb79fef03 1161
kevman 0:38ceb79fef03 1162 /**
kevman 0:38ceb79fef03 1163 \brief Unsigned Saturate
kevman 0:38ceb79fef03 1164 \details Saturates an unsigned value.
kevman 0:38ceb79fef03 1165 \param [in] ARG1 Value to be saturated
kevman 0:38ceb79fef03 1166 \param [in] ARG2 Bit position to saturate to (0..31)
kevman 0:38ceb79fef03 1167 \return Saturated value
kevman 0:38ceb79fef03 1168 */
kevman 0:38ceb79fef03 1169 #define __USAT(ARG1,ARG2) \
kevman 0:38ceb79fef03 1170 __extension__ \
kevman 0:38ceb79fef03 1171 ({ \
kevman 0:38ceb79fef03 1172 uint32_t __RES, __ARG1 = (ARG1); \
kevman 0:38ceb79fef03 1173 __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
kevman 0:38ceb79fef03 1174 __RES; \
kevman 0:38ceb79fef03 1175 })
kevman 0:38ceb79fef03 1176
kevman 0:38ceb79fef03 1177
kevman 0:38ceb79fef03 1178 /**
kevman 0:38ceb79fef03 1179 \brief Rotate Right with Extend (32 bit)
kevman 0:38ceb79fef03 1180 \details Moves each bit of a bitstring right by one bit.
kevman 0:38ceb79fef03 1181 The carry input is shifted in at the left end of the bitstring.
kevman 0:38ceb79fef03 1182 \param [in] value Value to rotate
kevman 0:38ceb79fef03 1183 \return Rotated value
kevman 0:38ceb79fef03 1184 */
kevman 0:38ceb79fef03 1185 __STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
kevman 0:38ceb79fef03 1186 {
kevman 0:38ceb79fef03 1187 uint32_t result;
kevman 0:38ceb79fef03 1188
kevman 0:38ceb79fef03 1189 __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
kevman 0:38ceb79fef03 1190 return(result);
kevman 0:38ceb79fef03 1191 }
kevman 0:38ceb79fef03 1192
kevman 0:38ceb79fef03 1193
/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an Unprivileged LDRBT instruction for an 8 bit value.
           The load is performed with unprivileged access permissions even
           when executed from privileged code.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
  */
  __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
  return ((uint8_t) result); /* explicit narrowing cast from the 32-bit asm output */
}
kevman 0:38ceb79fef03 1214
kevman 0:38ceb79fef03 1215
/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an Unprivileged LDRHT instruction for a 16 bit value.
           The load is performed with unprivileged access permissions even
           when executed from privileged code.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
  */
  __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
  return ((uint16_t) result); /* explicit narrowing cast from the 32-bit asm output */
}
kevman 0:38ceb79fef03 1236
kevman 0:38ceb79fef03 1237
/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an Unprivileged LDRT instruction for a 32 bit value.
           The load is performed with unprivileged access permissions even
           when executed from privileged code.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
kevman 0:38ceb79fef03 1251
kevman 0:38ceb79fef03 1252
/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an Unprivileged STRBT instruction for an 8 bit value.
           The store is performed with unprivileged access permissions even
           when executed from privileged code.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
kevman 0:38ceb79fef03 1263
kevman 0:38ceb79fef03 1264
/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an Unprivileged STRHT instruction for a 16 bit value.
           The store is performed with unprivileged access permissions even
           when executed from privileged code.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
kevman 0:38ceb79fef03 1275
kevman 0:38ceb79fef03 1276
/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an Unprivileged STRT instruction for a 32 bit value.
           The store is performed with unprivileged access permissions even
           when executed from privileged code.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
kevman 0:38ceb79fef03 1287
kevman 0:38ceb79fef03 1288 #else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
kevman 0:38ceb79fef03 1289 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
kevman 0:38ceb79fef03 1290 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
kevman 0:38ceb79fef03 1291
kevman 0:38ceb79fef03 1292 /**
kevman 0:38ceb79fef03 1293 \brief Signed Saturate
kevman 0:38ceb79fef03 1294 \details Saturates a signed value.
kevman 0:38ceb79fef03 1295 \param [in] value Value to be saturated
kevman 0:38ceb79fef03 1296 \param [in] sat Bit position to saturate to (1..32)
kevman 0:38ceb79fef03 1297 \return Saturated value
kevman 0:38ceb79fef03 1298 */
kevman 0:38ceb79fef03 1299 __STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
kevman 0:38ceb79fef03 1300 {
kevman 0:38ceb79fef03 1301 if ((sat >= 1U) && (sat <= 32U))
kevman 0:38ceb79fef03 1302 {
kevman 0:38ceb79fef03 1303 const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
kevman 0:38ceb79fef03 1304 const int32_t min = -1 - max ;
kevman 0:38ceb79fef03 1305 if (val > max)
kevman 0:38ceb79fef03 1306 {
kevman 0:38ceb79fef03 1307 return max;
kevman 0:38ceb79fef03 1308 }
kevman 0:38ceb79fef03 1309 else if (val < min)
kevman 0:38ceb79fef03 1310 {
kevman 0:38ceb79fef03 1311 return min;
kevman 0:38ceb79fef03 1312 }
kevman 0:38ceb79fef03 1313 }
kevman 0:38ceb79fef03 1314 return val;
kevman 0:38ceb79fef03 1315 }
kevman 0:38ceb79fef03 1316
kevman 0:38ceb79fef03 1317 /**
kevman 0:38ceb79fef03 1318 \brief Unsigned Saturate
kevman 0:38ceb79fef03 1319 \details Saturates an unsigned value.
kevman 0:38ceb79fef03 1320 \param [in] value Value to be saturated
kevman 0:38ceb79fef03 1321 \param [in] sat Bit position to saturate to (0..31)
kevman 0:38ceb79fef03 1322 \return Saturated value
kevman 0:38ceb79fef03 1323 */
kevman 0:38ceb79fef03 1324 __STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
kevman 0:38ceb79fef03 1325 {
kevman 0:38ceb79fef03 1326 if (sat <= 31U)
kevman 0:38ceb79fef03 1327 {
kevman 0:38ceb79fef03 1328 const uint32_t max = ((1U << sat) - 1U);
kevman 0:38ceb79fef03 1329 if (val > (int32_t)max)
kevman 0:38ceb79fef03 1330 {
kevman 0:38ceb79fef03 1331 return max;
kevman 0:38ceb79fef03 1332 }
kevman 0:38ceb79fef03 1333 else if (val < 0)
kevman 0:38ceb79fef03 1334 {
kevman 0:38ceb79fef03 1335 return 0U;
kevman 0:38ceb79fef03 1336 }
kevman 0:38ceb79fef03 1337 }
kevman 0:38ceb79fef03 1338 return (uint32_t)val;
kevman 0:38ceb79fef03 1339 }
kevman 0:38ceb79fef03 1340
kevman 0:38ceb79fef03 1341 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
kevman 0:38ceb79fef03 1342 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
kevman 0:38ceb79fef03 1343 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
kevman 0:38ceb79fef03 1344
kevman 0:38ceb79fef03 1345
kevman 0:38ceb79fef03 1346 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
kevman 0:38ceb79fef03 1347 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
/**
  \brief   Load-Acquire (8 bit)
  \details Executes an LDAB instruction for an 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}
kevman 0:38ceb79fef03 1361
kevman 0:38ceb79fef03 1362
/**
  \brief   Load-Acquire (16 bit)
  \details Executes an LDAH instruction for a 16 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}
kevman 0:38ceb79fef03 1376
kevman 0:38ceb79fef03 1377
/**
  \brief   Load-Acquire (32 bit)
  \details Executes an LDA instruction for a 32 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
kevman 0:38ceb79fef03 1391
kevman 0:38ceb79fef03 1392
/**
  \brief   Store-Release (8 bit)
  \details Executes an STLB instruction for an 8 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
kevman 0:38ceb79fef03 1403
kevman 0:38ceb79fef03 1404
/**
  \brief   Store-Release (16 bit)
  \details Executes an STLH instruction for a 16 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
kevman 0:38ceb79fef03 1415
kevman 0:38ceb79fef03 1416
/**
  \brief   Store-Release (32 bit)
  \details Executes an STL instruction for a 32 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
kevman 0:38ceb79fef03 1427
kevman 0:38ceb79fef03 1428
/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes an LDAEXB instruction for an 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}
kevman 0:38ceb79fef03 1442
kevman 0:38ceb79fef03 1443
/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes an LDAEXH instruction for a 16 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}
kevman 0:38ceb79fef03 1457
kevman 0:38ceb79fef03 1458
/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes an LDAEX instruction for a 32 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
kevman 0:38ceb79fef03 1472
kevman 0:38ceb79fef03 1473
/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes an STLEXB instruction for an 8 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
{
  uint32_t result;

  /* "=&r" (early-clobber): the status register must differ from the operand registers. */
  __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}
kevman 0:38ceb79fef03 1489
kevman 0:38ceb79fef03 1490
/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes an STLEXH instruction for a 16 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
{
  uint32_t result;

  /* "=&r" (early-clobber): the status register must differ from the operand registers. */
  __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}
kevman 0:38ceb79fef03 1506
kevman 0:38ceb79fef03 1507
/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes an STLEX instruction for a 32 bit value.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
{
  uint32_t result;

  /* "=&r" (early-clobber): the status register must differ from the operand registers. */
  __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}
kevman 0:38ceb79fef03 1523
kevman 0:38ceb79fef03 1524 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
kevman 0:38ceb79fef03 1525 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
kevman 0:38ceb79fef03 1526
kevman 0:38ceb79fef03 1527 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
kevman 0:38ceb79fef03 1528
kevman 0:38ceb79fef03 1529
kevman 0:38ceb79fef03 1530 /* ################### Compiler specific Intrinsics ########################### */
kevman 0:38ceb79fef03 1531 /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
kevman 0:38ceb79fef03 1532 Access to dedicated SIMD instructions
kevman 0:38ceb79fef03 1533 @{
kevman 0:38ceb79fef03 1534 */
kevman 0:38ceb79fef03 1535
kevman 0:38ceb79fef03 1536 #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
kevman 0:38ceb79fef03 1537
/**
  \brief   Quad 8-bit signed addition (SADD8 instruction).
  \param [in]  op1  first operand (four packed 8-bit values)
  \param [in]  op2  second operand (four packed 8-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1545
/**
  \brief   Quad 8-bit saturating signed addition (QADD8 instruction).
  \param [in]  op1  first operand (four packed 8-bit values)
  \param [in]  op2  second operand (four packed 8-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1553
/**
  \brief   Quad 8-bit signed halving addition (SHADD8 instruction).
  \param [in]  op1  first operand (four packed 8-bit values)
  \param [in]  op2  second operand (four packed 8-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1561
/**
  \brief   Quad 8-bit unsigned addition (UADD8 instruction).
  \param [in]  op1  first operand (four packed 8-bit values)
  \param [in]  op2  second operand (four packed 8-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1569
/**
  \brief   Quad 8-bit saturating unsigned addition (UQADD8 instruction).
  \param [in]  op1  first operand (four packed 8-bit values)
  \param [in]  op2  second operand (four packed 8-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1577
/**
  \brief   Quad 8-bit unsigned halving addition (UHADD8 instruction).
  \param [in]  op1  first operand (four packed 8-bit values)
  \param [in]  op2  second operand (four packed 8-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1585
kevman 0:38ceb79fef03 1586
/**
  \brief   Quad 8-bit signed subtraction (SSUB8 instruction).
  \param [in]  op1  first operand (four packed 8-bit values)
  \param [in]  op2  second operand (four packed 8-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1594
/**
  \brief   Quad 8-bit saturating signed subtraction (QSUB8 instruction).
  \param [in]  op1  first operand (four packed 8-bit values)
  \param [in]  op2  second operand (four packed 8-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1602
/**
  \brief   Quad 8-bit signed halving subtraction (SHSUB8 instruction).
  \param [in]  op1  first operand (four packed 8-bit values)
  \param [in]  op2  second operand (four packed 8-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1610
/**
  \brief   Quad 8-bit unsigned subtraction (USUB8 instruction).
  \param [in]  op1  first operand (four packed 8-bit values)
  \param [in]  op2  second operand (four packed 8-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1618
/**
  \brief   Quad 8-bit saturating unsigned subtraction (UQSUB8 instruction).
  \param [in]  op1  first operand (four packed 8-bit values)
  \param [in]  op2  second operand (four packed 8-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1626
/**
  \brief   Quad 8-bit unsigned halving subtraction (UHSUB8 instruction).
  \param [in]  op1  first operand (four packed 8-bit values)
  \param [in]  op2  second operand (four packed 8-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1634
kevman 0:38ceb79fef03 1635
/**
  \brief   Dual 16-bit signed addition (SADD16 instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1643
/**
  \brief   Dual 16-bit saturating signed addition (QADD16 instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1651
/**
  \brief   Dual 16-bit signed halving addition (SHADD16 instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1659
/**
  \brief   Dual 16-bit unsigned addition (UADD16 instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1667
/**
  \brief   Dual 16-bit saturating unsigned addition (UQADD16 instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1675
/**
  \brief   Dual 16-bit unsigned halving addition (UHADD16 instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1683
/**
  \brief   Dual 16-bit signed subtraction (SSUB16 instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1691
/**
  \brief   Dual 16-bit saturating signed subtraction (QSUB16 instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1699
/**
  \brief   Dual 16-bit signed halving subtraction (SHSUB16 instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1707
/**
  \brief   Dual 16-bit unsigned subtraction (USUB16 instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1715
/**
  \brief   Dual 16-bit saturating unsigned subtraction (UQSUB16 instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1723
/**
  \brief   Dual 16-bit unsigned halving subtraction (UHSUB16 instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1731
/**
  \brief   Dual 16-bit signed add/subtract with exchange (SASX instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1739
/**
  \brief   Dual 16-bit saturating add/subtract with exchange (QASX instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1747
/**
  \brief   Dual 16-bit signed halving add/subtract with exchange (SHASX instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1755
/**
  \brief   Dual 16-bit unsigned add/subtract with exchange (UASX instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1763
/**
  \brief   Dual 16-bit saturating unsigned add/subtract with exchange (UQASX instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1771
/**
  \brief   Dual 16-bit unsigned halving add/subtract with exchange (UHASX instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1779
/**
  \brief   Dual 16-bit signed subtract/add with exchange (SSAX instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1787
/**
  \brief   Dual 16-bit saturating subtract/add with exchange (QSAX instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1795
/**
  \brief   Dual 16-bit signed halving subtract/add with exchange (SHSAX instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1803
/**
  \brief   Dual 16-bit unsigned subtract/add with exchange (USAX instruction).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1811
/**
  \brief   Unsigned Saturating Subtract and Add with Exchange (UQSAX)
  \details Like USAX, but each 16-bit result is saturated to the unsigned
           16-bit range.
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return           saturated results packed as two 16-bit values
 */
__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1819
/**
  \brief   Unsigned Halving Subtract and Add with Exchange (UHSAX)
  \details Like USAX, but each 16-bit result is halved (shifted right by one).
  \param [in]  op1  first operand (two packed 16-bit values)
  \param [in]  op2  second operand (two packed 16-bit values)
  \return           halved results packed as two 16-bit values
 */
__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1827
/**
  \brief   Unsigned Sum of Absolute Differences (USAD8)
  \details Computes the sum of the absolute differences of the four
           corresponding bytes of op1 and op2.
  \param [in]  op1  first operand (four packed 8-bit values)
  \param [in]  op2  second operand (four packed 8-bit values)
  \return           sum of the four absolute byte differences
 */
__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1835
/**
  \brief   Unsigned Sum of Absolute Differences and Accumulate (USADA8)
  \details As USAD8, but additionally adds the accumulator op3 to the sum.
  \param [in]  op1  first operand (four packed 8-bit values)
  \param [in]  op2  second operand (four packed 8-bit values)
  \param [in]  op3  accumulation value
  \return           op3 + sum of the four absolute byte differences
 */
__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
kevman 0:38ceb79fef03 1843
/**
  \brief   Dual 16-bit Signed Saturate (SSAT16)
  \details Saturates each signed 16-bit halfword of ARG1 to ARG2 bits.
  \param [in]  ARG1  two packed signed 16-bit values to be saturated
  \param [in]  ARG2  bit position to saturate to; must be a compile-time
                     constant (enforced by the "I" asm constraint)
  \return            saturated values packed as two 16-bit values
 */
#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
kevman 0:38ceb79fef03 1850
/**
  \brief   Dual 16-bit Unsigned Saturate (USAT16)
  \details Saturates each signed 16-bit halfword of ARG1 to an unsigned
           range of ARG2 bits.
  \param [in]  ARG1  two packed 16-bit values to be saturated
  \param [in]  ARG2  bit position to saturate to; must be a compile-time
                     constant (enforced by the "I" asm constraint)
  \return            saturated values packed as two 16-bit values
 */
#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
kevman 0:38ceb79fef03 1857
/**
  \brief   Dual extract 8-bits and zero-extend to 16-bits (UXTB16)
  \details Zero-extends bytes 0 and 2 of op1 into the two 16-bit halfwords
           of the result.
  \param [in]  op1  operand containing the two bytes to extract
  \return           two zero-extended 16-bit values, packed
 */
__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}
kevman 0:38ceb79fef03 1865
/**
  \brief   Dual extract, zero-extend and add (UXTAB16)
  \details Zero-extends bytes 0 and 2 of op2 and adds them to the
           corresponding 16-bit halfwords of op1.
  \param [in]  op1  accumulator (two packed 16-bit values)
  \param [in]  op2  operand containing the two bytes to extract
  \return           sums packed as two 16-bit values
 */
__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1873
/**
  \brief   Dual extract 8-bits and sign-extend to 16-bits (SXTB16)
  \details Sign-extends bytes 0 and 2 of op1 into the two 16-bit halfwords
           of the result.
  \param [in]  op1  operand containing the two bytes to extract
  \return           two sign-extended 16-bit values, packed
 */
__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}
kevman 0:38ceb79fef03 1881
/**
  \brief   Dual extract, sign-extend and add (SXTAB16)
  \details Sign-extends bytes 0 and 2 of op2 and adds them to the
           corresponding 16-bit halfwords of op1.
  \param [in]  op1  accumulator (two packed 16-bit values)
  \param [in]  op2  operand containing the two bytes to extract
  \return           sums packed as two 16-bit values
 */
__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1889
/**
  \brief   Signed Dual Multiply Add (SMUAD)
  \details Multiplies the corresponding signed 16-bit halfwords of op1 and
           op2 and adds the two products.
  \param [in]  op1  first operand (two packed signed 16-bit values)
  \param [in]  op2  second operand (two packed signed 16-bit values)
  \return           sum of the two 16x16 products
 */
__STATIC_FORCEINLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1897
/**
  \brief   Signed Dual Multiply Add with exchange (SMUADX)
  \details As SMUAD, but the halfwords of op2 are exchanged before the
           multiplications.
  \param [in]  op1  first operand (two packed signed 16-bit values)
  \param [in]  op2  second operand (two packed signed 16-bit values)
  \return           sum of the two cross-products
 */
__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1905
/**
  \brief   Signed Multiply Accumulate Dual (SMLAD)
  \details Multiplies the corresponding signed 16-bit halfwords of op1 and
           op2, then adds both products to the accumulator op3.
  \param [in]  op1  first operand (two packed signed 16-bit values)
  \param [in]  op2  second operand (two packed signed 16-bit values)
  \param [in]  op3  accumulation value
  \return           op3 + product sum
 */
__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
kevman 0:38ceb79fef03 1913
/**
  \brief   Signed Multiply Accumulate Dual with exchange (SMLADX)
  \details As SMLAD, but the halfwords of op2 are exchanged before the
           multiplications.
  \param [in]  op1  first operand (two packed signed 16-bit values)
  \param [in]  op2  second operand (two packed signed 16-bit values)
  \param [in]  op3  accumulation value
  \return           op3 + cross-product sum
 */
__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
kevman 0:38ceb79fef03 1921
/**
  \brief   Signed Multiply Accumulate Long Dual (SMLALD)
  \details Multiplies the corresponding signed 16-bit halfwords of op1 and
           op2 and adds both products to the 64-bit accumulator acc.
  \param [in]  op1  first operand (two packed signed 16-bit values)
  \param [in]  op2  second operand (two packed signed 16-bit values)
  \param [in]  acc  64-bit accumulation value
  \return           acc + product sum (64-bit)
 */
__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* The instruction takes the 64-bit accumulator as a register pair; the
     union maps the pair onto the uint64_t with the correct word order for
     the target's endianness. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
kevman 0:38ceb79fef03 1938
/**
  \brief   Signed Multiply Accumulate Long Dual with exchange (SMLALDX)
  \details As SMLALD, but the halfwords of op2 are exchanged before the
           multiplications.
  \param [in]  op1  first operand (two packed signed 16-bit values)
  \param [in]  op2  second operand (two packed signed 16-bit values)
  \param [in]  acc  64-bit accumulation value
  \return           acc + cross-product sum (64-bit)
 */
__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* Register-pair mapping of the 64-bit accumulator; word order depends on
     target endianness. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
kevman 0:38ceb79fef03 1955
/**
  \brief   Signed Dual Multiply Subtract (SMUSD)
  \details Multiplies the corresponding signed 16-bit halfwords of op1 and
           op2 and subtracts the high product from the low product.
  \param [in]  op1  first operand (two packed signed 16-bit values)
  \param [in]  op2  second operand (two packed signed 16-bit values)
  \return           difference of the two 16x16 products
 */
__STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1963
/**
  \brief   Signed Dual Multiply Subtract with exchange (SMUSDX)
  \details As SMUSD, but the halfwords of op2 are exchanged before the
           multiplications.
  \param [in]  op1  first operand (two packed signed 16-bit values)
  \param [in]  op2  second operand (two packed signed 16-bit values)
  \return           difference of the two cross-products
 */
__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 1971
/**
  \brief   Signed Multiply Subtract Dual (SMLSD)
  \details Multiplies the corresponding signed 16-bit halfwords of op1 and
           op2, then adds the product difference to the accumulator op3.
  \param [in]  op1  first operand (two packed signed 16-bit values)
  \param [in]  op2  second operand (two packed signed 16-bit values)
  \param [in]  op3  accumulation value
  \return           op3 + product difference
 */
__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
kevman 0:38ceb79fef03 1979
/**
  \brief   Signed Multiply Subtract Dual with exchange (SMLSDX)
  \details As SMLSD, but the halfwords of op2 are exchanged before the
           multiplications.
  \param [in]  op1  first operand (two packed signed 16-bit values)
  \param [in]  op2  second operand (two packed signed 16-bit values)
  \param [in]  op3  accumulation value
  \return           op3 + cross-product difference
 */
__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
kevman 0:38ceb79fef03 1987
/**
  \brief   Signed Multiply Subtract Long Dual (SMLSLD)
  \details Multiplies the corresponding signed 16-bit halfwords of op1 and
           op2 and adds the product difference to the 64-bit accumulator acc.
  \param [in]  op1  first operand (two packed signed 16-bit values)
  \param [in]  op2  second operand (two packed signed 16-bit values)
  \param [in]  acc  64-bit accumulation value
  \return           acc + product difference (64-bit)
 */
__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* Register-pair mapping of the 64-bit accumulator; word order depends on
     target endianness. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
kevman 0:38ceb79fef03 2004
/**
  \brief   Signed Multiply Subtract Long Dual with exchange (SMLSLDX)
  \details As SMLSLD, but the halfwords of op2 are exchanged before the
           multiplications.
  \param [in]  op1  first operand (two packed signed 16-bit values)
  \param [in]  op2  second operand (two packed signed 16-bit values)
  \param [in]  acc  64-bit accumulation value
  \return           acc + cross-product difference (64-bit)
 */
__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* Register-pair mapping of the 64-bit accumulator; word order depends on
     target endianness. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
kevman 0:38ceb79fef03 2021
/**
  \brief   Select bytes (SEL)
  \details Selects each result byte from op1 or op2 according to the APSR.GE
           flags set by a preceding SIMD instruction.
  \param [in]  op1  first operand
  \param [in]  op2  second operand
  \return           bytewise selection of op1/op2
 */
__STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 2029
/**
  \brief   Saturating Add (QADD)
  \details Adds two signed 32-bit values, saturating the result to the
           signed 32-bit range.
  \param [in]  op1  first operand
  \param [in]  op2  second operand
  \return           saturated sum
 */
__STATIC_FORCEINLINE  int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 2037
/**
  \brief   Saturating Subtract (QSUB)
  \details Subtracts two signed 32-bit values, saturating the result to the
           signed 32-bit range.
  \param [in]  op1  first operand
  \param [in]  op2  second operand
  \return           saturated difference
 */
__STATIC_FORCEINLINE  int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
kevman 0:38ceb79fef03 2045
/* Inline-asm variants of __PKHBT/__PKHTB. Intentionally disabled (#if 0):
   the pure-C macros below are used instead.
   NOTE(review): presumably disabled because the "I" constraint restricts
   ARG3 to constants the instruction's shift field accepts — confirm against
   CMSIS history before re-enabling. */
#if 0
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
#endif
kevman 0:38ceb79fef03 2064
/* Pack Halfword Bottom/Top, implemented in plain C.
   __PKHBT: low halfword of ARG1 combined with the low halfword of
            (ARG2 << ARG3) placed in the high halfword.
   __PKHTB: high halfword of ARG1 combined with the low halfword of
            (ARG2 >> ARG3) placed in the low halfword.
   NOTE(review): assumes ARG3 is less than 32 — a shift count >= the operand
   width is undefined behavior in C; confirm callers respect this. */
#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )
kevman 0:38ceb79fef03 2070
/**
  \brief   Signed Most significant word Multiply Accumulate (SMMLA)
  \details Multiplies two signed 32-bit values, takes the most significant
           32 bits of the 64-bit product and adds the accumulator op3.
  \param [in]  op1  first operand
  \param [in]  op2  second operand
  \param [in]  op3  accumulation value
  \return           op3 + high word of op1*op2
 */
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
  return(result);
}
kevman 0:38ceb79fef03 2078
kevman 0:38ceb79fef03 2079 #endif /* (__ARM_FEATURE_DSP == 1) */
kevman 0:38ceb79fef03 2080 /*@} end of group CMSIS_SIMD_intrinsics */
kevman 0:38ceb79fef03 2081
kevman 0:38ceb79fef03 2082
kevman 0:38ceb79fef03 2083 #pragma GCC diagnostic pop
kevman 0:38ceb79fef03 2084
kevman 0:38ceb79fef03 2085 #endif /* __CMSIS_GCC_H */