mbed library sources

Fork of mbed-src by mbed official

Committer:
lzbpli
Date:
Thu Jul 07 06:48:59 2016 +0000
Revision:
636:b0d178e9fa10
Parent:
13:0645d8841f51
l053

Who changed what in which revision?

User | Revision | Line number | New contents of line
bogdanm 13:0645d8841f51 1 /**************************************************************************//**
bogdanm 13:0645d8841f51 2 * @file core_cm4_simd.h
bogdanm 13:0645d8841f51 3 * @brief CMSIS Cortex-M4 SIMD Header File
bogdanm 13:0645d8841f51 4 * @version V3.20
bogdanm 13:0645d8841f51 5 * @date 25. February 2013
bogdanm 13:0645d8841f51 6 *
bogdanm 13:0645d8841f51 7 * @note
bogdanm 13:0645d8841f51 8 *
bogdanm 13:0645d8841f51 9 ******************************************************************************/
bogdanm 13:0645d8841f51 10 /* Copyright (c) 2009 - 2013 ARM LIMITED
bogdanm 13:0645d8841f51 11
bogdanm 13:0645d8841f51 12 All rights reserved.
bogdanm 13:0645d8841f51 13 Redistribution and use in source and binary forms, with or without
bogdanm 13:0645d8841f51 14 modification, are permitted provided that the following conditions are met:
bogdanm 13:0645d8841f51 15 - Redistributions of source code must retain the above copyright
bogdanm 13:0645d8841f51 16 notice, this list of conditions and the following disclaimer.
bogdanm 13:0645d8841f51 17 - Redistributions in binary form must reproduce the above copyright
bogdanm 13:0645d8841f51 18 notice, this list of conditions and the following disclaimer in the
bogdanm 13:0645d8841f51 19 documentation and/or other materials provided with the distribution.
bogdanm 13:0645d8841f51 20 - Neither the name of ARM nor the names of its contributors may be used
bogdanm 13:0645d8841f51 21 to endorse or promote products derived from this software without
bogdanm 13:0645d8841f51 22 specific prior written permission.
bogdanm 13:0645d8841f51 23 *
bogdanm 13:0645d8841f51 24 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
bogdanm 13:0645d8841f51 25 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
bogdanm 13:0645d8841f51 26 IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
bogdanm 13:0645d8841f51 27 ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
bogdanm 13:0645d8841f51 28 LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
bogdanm 13:0645d8841f51 29 CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
bogdanm 13:0645d8841f51 30 SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
bogdanm 13:0645d8841f51 31 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
bogdanm 13:0645d8841f51 32 CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
bogdanm 13:0645d8841f51 33 ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
bogdanm 13:0645d8841f51 34 POSSIBILITY OF SUCH DAMAGE.
bogdanm 13:0645d8841f51 35 ---------------------------------------------------------------------------*/
bogdanm 13:0645d8841f51 36
bogdanm 13:0645d8841f51 37
bogdanm 13:0645d8841f51 38 #ifdef __cplusplus
bogdanm 13:0645d8841f51 39 extern "C" {
bogdanm 13:0645d8841f51 40 #endif
bogdanm 13:0645d8841f51 41
bogdanm 13:0645d8841f51 42 #ifndef __CORE_CM4_SIMD_H
bogdanm 13:0645d8841f51 43 #define __CORE_CM4_SIMD_H
bogdanm 13:0645d8841f51 44
bogdanm 13:0645d8841f51 45
bogdanm 13:0645d8841f51 46 /*******************************************************************************
bogdanm 13:0645d8841f51 47 * Hardware Abstraction Layer
bogdanm 13:0645d8841f51 48 ******************************************************************************/
bogdanm 13:0645d8841f51 49
bogdanm 13:0645d8841f51 50
bogdanm 13:0645d8841f51 51 /* ################### Compiler specific Intrinsics ########################### */
bogdanm 13:0645d8841f51 52 /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
bogdanm 13:0645d8841f51 53 Access to dedicated SIMD instructions
bogdanm 13:0645d8841f51 54 @{
bogdanm 13:0645d8841f51 55 */
bogdanm 13:0645d8841f51 56
bogdanm 13:0645d8841f51 57 #if defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
bogdanm 13:0645d8841f51 58 /* ARM armcc specific functions */
bogdanm 13:0645d8841f51 59
bogdanm 13:0645d8841f51 60 /*------ CM4 SIMD Intrinsics -----------------------------------------------------*/
bogdanm 13:0645d8841f51 61 #define __SADD8 __sadd8
bogdanm 13:0645d8841f51 62 #define __QADD8 __qadd8
bogdanm 13:0645d8841f51 63 #define __SHADD8 __shadd8
bogdanm 13:0645d8841f51 64 #define __UADD8 __uadd8
bogdanm 13:0645d8841f51 65 #define __UQADD8 __uqadd8
bogdanm 13:0645d8841f51 66 #define __UHADD8 __uhadd8
bogdanm 13:0645d8841f51 67 #define __SSUB8 __ssub8
bogdanm 13:0645d8841f51 68 #define __QSUB8 __qsub8
bogdanm 13:0645d8841f51 69 #define __SHSUB8 __shsub8
bogdanm 13:0645d8841f51 70 #define __USUB8 __usub8
bogdanm 13:0645d8841f51 71 #define __UQSUB8 __uqsub8
bogdanm 13:0645d8841f51 72 #define __UHSUB8 __uhsub8
bogdanm 13:0645d8841f51 73 #define __SADD16 __sadd16
bogdanm 13:0645d8841f51 74 #define __QADD16 __qadd16
bogdanm 13:0645d8841f51 75 #define __SHADD16 __shadd16
bogdanm 13:0645d8841f51 76 #define __UADD16 __uadd16
bogdanm 13:0645d8841f51 77 #define __UQADD16 __uqadd16
bogdanm 13:0645d8841f51 78 #define __UHADD16 __uhadd16
bogdanm 13:0645d8841f51 79 #define __SSUB16 __ssub16
bogdanm 13:0645d8841f51 80 #define __QSUB16 __qsub16
bogdanm 13:0645d8841f51 81 #define __SHSUB16 __shsub16
bogdanm 13:0645d8841f51 82 #define __USUB16 __usub16
bogdanm 13:0645d8841f51 83 #define __UQSUB16 __uqsub16
bogdanm 13:0645d8841f51 84 #define __UHSUB16 __uhsub16
bogdanm 13:0645d8841f51 85 #define __SASX __sasx
bogdanm 13:0645d8841f51 86 #define __QASX __qasx
bogdanm 13:0645d8841f51 87 #define __SHASX __shasx
bogdanm 13:0645d8841f51 88 #define __UASX __uasx
bogdanm 13:0645d8841f51 89 #define __UQASX __uqasx
bogdanm 13:0645d8841f51 90 #define __UHASX __uhasx
bogdanm 13:0645d8841f51 91 #define __SSAX __ssax
bogdanm 13:0645d8841f51 92 #define __QSAX __qsax
bogdanm 13:0645d8841f51 93 #define __SHSAX __shsax
bogdanm 13:0645d8841f51 94 #define __USAX __usax
bogdanm 13:0645d8841f51 95 #define __UQSAX __uqsax
bogdanm 13:0645d8841f51 96 #define __UHSAX __uhsax
bogdanm 13:0645d8841f51 97 #define __USAD8 __usad8
bogdanm 13:0645d8841f51 98 #define __USADA8 __usada8
bogdanm 13:0645d8841f51 99 #define __SSAT16 __ssat16
bogdanm 13:0645d8841f51 100 #define __USAT16 __usat16
bogdanm 13:0645d8841f51 101 #define __UXTB16 __uxtb16
bogdanm 13:0645d8841f51 102 #define __UXTAB16 __uxtab16
bogdanm 13:0645d8841f51 103 #define __SXTB16 __sxtb16
bogdanm 13:0645d8841f51 104 #define __SXTAB16 __sxtab16
bogdanm 13:0645d8841f51 105 #define __SMUAD __smuad
bogdanm 13:0645d8841f51 106 #define __SMUADX __smuadx
bogdanm 13:0645d8841f51 107 #define __SMLAD __smlad
bogdanm 13:0645d8841f51 108 #define __SMLADX __smladx
bogdanm 13:0645d8841f51 109 #define __SMLALD __smlald
bogdanm 13:0645d8841f51 110 #define __SMLALDX __smlaldx
bogdanm 13:0645d8841f51 111 #define __SMUSD __smusd
bogdanm 13:0645d8841f51 112 #define __SMUSDX __smusdx
bogdanm 13:0645d8841f51 113 #define __SMLSD __smlsd
bogdanm 13:0645d8841f51 114 #define __SMLSDX __smlsdx
bogdanm 13:0645d8841f51 115 #define __SMLSLD __smlsld
bogdanm 13:0645d8841f51 116 #define __SMLSLDX __smlsldx
bogdanm 13:0645d8841f51 117 #define __SEL __sel
bogdanm 13:0645d8841f51 118 #define __QADD __qadd
bogdanm 13:0645d8841f51 119 #define __QSUB __qsub
bogdanm 13:0645d8841f51 120
bogdanm 13:0645d8841f51 121 #define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \
bogdanm 13:0645d8841f51 122 ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) )
bogdanm 13:0645d8841f51 123
bogdanm 13:0645d8841f51 124 #define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \
bogdanm 13:0645d8841f51 125 ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) )
bogdanm 13:0645d8841f51 126
bogdanm 13:0645d8841f51 127 #define __SMMLA(ARG1,ARG2,ARG3) ( (int32_t)((((int64_t)(ARG1) * (ARG2)) + \
bogdanm 13:0645d8841f51 128 ((int64_t)(ARG3) << 32) ) >> 32))
bogdanm 13:0645d8841f51 129
bogdanm 13:0645d8841f51 130 /*-- End CM4 SIMD Intrinsics -----------------------------------------------------*/
bogdanm 13:0645d8841f51 131
bogdanm 13:0645d8841f51 132
bogdanm 13:0645d8841f51 133
bogdanm 13:0645d8841f51 134 #elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
bogdanm 13:0645d8841f51 135 /* IAR iccarm specific functions */
bogdanm 13:0645d8841f51 136
bogdanm 13:0645d8841f51 137 /*------ CM4 SIMD Intrinsics -----------------------------------------------------*/
bogdanm 13:0645d8841f51 138 #include <cmsis_iar.h>
bogdanm 13:0645d8841f51 139
bogdanm 13:0645d8841f51 140 /*-- End CM4 SIMD Intrinsics -----------------------------------------------------*/
bogdanm 13:0645d8841f51 141
bogdanm 13:0645d8841f51 142
bogdanm 13:0645d8841f51 143
bogdanm 13:0645d8841f51 144 #elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
bogdanm 13:0645d8841f51 145 /* TI CCS specific functions */
bogdanm 13:0645d8841f51 146
bogdanm 13:0645d8841f51 147 /*------ CM4 SIMD Intrinsics -----------------------------------------------------*/
bogdanm 13:0645d8841f51 148 #include <cmsis_ccs.h>
bogdanm 13:0645d8841f51 149
bogdanm 13:0645d8841f51 150 /*-- End CM4 SIMD Intrinsics -----------------------------------------------------*/
bogdanm 13:0645d8841f51 151
bogdanm 13:0645d8841f51 152
bogdanm 13:0645d8841f51 153
bogdanm 13:0645d8841f51 154 #elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
bogdanm 13:0645d8841f51 155 /* GNU gcc specific functions */
bogdanm 13:0645d8841f51 156
bogdanm 13:0645d8841f51 157 /*------ CM4 SIMD Intrinsics -----------------------------------------------------*/
bogdanm 13:0645d8841f51 158 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 159 {
bogdanm 13:0645d8841f51 160 uint32_t result;
bogdanm 13:0645d8841f51 161
bogdanm 13:0645d8841f51 162 __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 163 return(result);
bogdanm 13:0645d8841f51 164 }
bogdanm 13:0645d8841f51 165
bogdanm 13:0645d8841f51 166 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 167 {
bogdanm 13:0645d8841f51 168 uint32_t result;
bogdanm 13:0645d8841f51 169
bogdanm 13:0645d8841f51 170 __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 171 return(result);
bogdanm 13:0645d8841f51 172 }
bogdanm 13:0645d8841f51 173
bogdanm 13:0645d8841f51 174 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 175 {
bogdanm 13:0645d8841f51 176 uint32_t result;
bogdanm 13:0645d8841f51 177
bogdanm 13:0645d8841f51 178 __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 179 return(result);
bogdanm 13:0645d8841f51 180 }
bogdanm 13:0645d8841f51 181
bogdanm 13:0645d8841f51 182 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 183 {
bogdanm 13:0645d8841f51 184 uint32_t result;
bogdanm 13:0645d8841f51 185
bogdanm 13:0645d8841f51 186 __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 187 return(result);
bogdanm 13:0645d8841f51 188 }
bogdanm 13:0645d8841f51 189
bogdanm 13:0645d8841f51 190 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 191 {
bogdanm 13:0645d8841f51 192 uint32_t result;
bogdanm 13:0645d8841f51 193
bogdanm 13:0645d8841f51 194 __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 195 return(result);
bogdanm 13:0645d8841f51 196 }
bogdanm 13:0645d8841f51 197
bogdanm 13:0645d8841f51 198 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 199 {
bogdanm 13:0645d8841f51 200 uint32_t result;
bogdanm 13:0645d8841f51 201
bogdanm 13:0645d8841f51 202 __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 203 return(result);
bogdanm 13:0645d8841f51 204 }
bogdanm 13:0645d8841f51 205
bogdanm 13:0645d8841f51 206
bogdanm 13:0645d8841f51 207 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 208 {
bogdanm 13:0645d8841f51 209 uint32_t result;
bogdanm 13:0645d8841f51 210
bogdanm 13:0645d8841f51 211 __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 212 return(result);
bogdanm 13:0645d8841f51 213 }
bogdanm 13:0645d8841f51 214
bogdanm 13:0645d8841f51 215 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 216 {
bogdanm 13:0645d8841f51 217 uint32_t result;
bogdanm 13:0645d8841f51 218
bogdanm 13:0645d8841f51 219 __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 220 return(result);
bogdanm 13:0645d8841f51 221 }
bogdanm 13:0645d8841f51 222
bogdanm 13:0645d8841f51 223 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 224 {
bogdanm 13:0645d8841f51 225 uint32_t result;
bogdanm 13:0645d8841f51 226
bogdanm 13:0645d8841f51 227 __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 228 return(result);
bogdanm 13:0645d8841f51 229 }
bogdanm 13:0645d8841f51 230
bogdanm 13:0645d8841f51 231 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 232 {
bogdanm 13:0645d8841f51 233 uint32_t result;
bogdanm 13:0645d8841f51 234
bogdanm 13:0645d8841f51 235 __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 236 return(result);
bogdanm 13:0645d8841f51 237 }
bogdanm 13:0645d8841f51 238
bogdanm 13:0645d8841f51 239 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 240 {
bogdanm 13:0645d8841f51 241 uint32_t result;
bogdanm 13:0645d8841f51 242
bogdanm 13:0645d8841f51 243 __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 244 return(result);
bogdanm 13:0645d8841f51 245 }
bogdanm 13:0645d8841f51 246
bogdanm 13:0645d8841f51 247 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 248 {
bogdanm 13:0645d8841f51 249 uint32_t result;
bogdanm 13:0645d8841f51 250
bogdanm 13:0645d8841f51 251 __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 252 return(result);
bogdanm 13:0645d8841f51 253 }
bogdanm 13:0645d8841f51 254
bogdanm 13:0645d8841f51 255
bogdanm 13:0645d8841f51 256 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 257 {
bogdanm 13:0645d8841f51 258 uint32_t result;
bogdanm 13:0645d8841f51 259
bogdanm 13:0645d8841f51 260 __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 261 return(result);
bogdanm 13:0645d8841f51 262 }
bogdanm 13:0645d8841f51 263
bogdanm 13:0645d8841f51 264 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 265 {
bogdanm 13:0645d8841f51 266 uint32_t result;
bogdanm 13:0645d8841f51 267
bogdanm 13:0645d8841f51 268 __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 269 return(result);
bogdanm 13:0645d8841f51 270 }
bogdanm 13:0645d8841f51 271
bogdanm 13:0645d8841f51 272 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 273 {
bogdanm 13:0645d8841f51 274 uint32_t result;
bogdanm 13:0645d8841f51 275
bogdanm 13:0645d8841f51 276 __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 277 return(result);
bogdanm 13:0645d8841f51 278 }
bogdanm 13:0645d8841f51 279
bogdanm 13:0645d8841f51 280 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 281 {
bogdanm 13:0645d8841f51 282 uint32_t result;
bogdanm 13:0645d8841f51 283
bogdanm 13:0645d8841f51 284 __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 285 return(result);
bogdanm 13:0645d8841f51 286 }
bogdanm 13:0645d8841f51 287
bogdanm 13:0645d8841f51 288 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 289 {
bogdanm 13:0645d8841f51 290 uint32_t result;
bogdanm 13:0645d8841f51 291
bogdanm 13:0645d8841f51 292 __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 293 return(result);
bogdanm 13:0645d8841f51 294 }
bogdanm 13:0645d8841f51 295
bogdanm 13:0645d8841f51 296 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 297 {
bogdanm 13:0645d8841f51 298 uint32_t result;
bogdanm 13:0645d8841f51 299
bogdanm 13:0645d8841f51 300 __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 301 return(result);
bogdanm 13:0645d8841f51 302 }
bogdanm 13:0645d8841f51 303
bogdanm 13:0645d8841f51 304 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 305 {
bogdanm 13:0645d8841f51 306 uint32_t result;
bogdanm 13:0645d8841f51 307
bogdanm 13:0645d8841f51 308 __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 309 return(result);
bogdanm 13:0645d8841f51 310 }
bogdanm 13:0645d8841f51 311
bogdanm 13:0645d8841f51 312 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 313 {
bogdanm 13:0645d8841f51 314 uint32_t result;
bogdanm 13:0645d8841f51 315
bogdanm 13:0645d8841f51 316 __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 317 return(result);
bogdanm 13:0645d8841f51 318 }
bogdanm 13:0645d8841f51 319
bogdanm 13:0645d8841f51 320 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 321 {
bogdanm 13:0645d8841f51 322 uint32_t result;
bogdanm 13:0645d8841f51 323
bogdanm 13:0645d8841f51 324 __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 325 return(result);
bogdanm 13:0645d8841f51 326 }
bogdanm 13:0645d8841f51 327
bogdanm 13:0645d8841f51 328 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 329 {
bogdanm 13:0645d8841f51 330 uint32_t result;
bogdanm 13:0645d8841f51 331
bogdanm 13:0645d8841f51 332 __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 333 return(result);
bogdanm 13:0645d8841f51 334 }
bogdanm 13:0645d8841f51 335
bogdanm 13:0645d8841f51 336 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 337 {
bogdanm 13:0645d8841f51 338 uint32_t result;
bogdanm 13:0645d8841f51 339
bogdanm 13:0645d8841f51 340 __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 341 return(result);
bogdanm 13:0645d8841f51 342 }
bogdanm 13:0645d8841f51 343
bogdanm 13:0645d8841f51 344 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 345 {
bogdanm 13:0645d8841f51 346 uint32_t result;
bogdanm 13:0645d8841f51 347
bogdanm 13:0645d8841f51 348 __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 349 return(result);
bogdanm 13:0645d8841f51 350 }
bogdanm 13:0645d8841f51 351
bogdanm 13:0645d8841f51 352 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 353 {
bogdanm 13:0645d8841f51 354 uint32_t result;
bogdanm 13:0645d8841f51 355
bogdanm 13:0645d8841f51 356 __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 357 return(result);
bogdanm 13:0645d8841f51 358 }
bogdanm 13:0645d8841f51 359
bogdanm 13:0645d8841f51 360 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 361 {
bogdanm 13:0645d8841f51 362 uint32_t result;
bogdanm 13:0645d8841f51 363
bogdanm 13:0645d8841f51 364 __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 365 return(result);
bogdanm 13:0645d8841f51 366 }
bogdanm 13:0645d8841f51 367
bogdanm 13:0645d8841f51 368 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 369 {
bogdanm 13:0645d8841f51 370 uint32_t result;
bogdanm 13:0645d8841f51 371
bogdanm 13:0645d8841f51 372 __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 373 return(result);
bogdanm 13:0645d8841f51 374 }
bogdanm 13:0645d8841f51 375
bogdanm 13:0645d8841f51 376 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 377 {
bogdanm 13:0645d8841f51 378 uint32_t result;
bogdanm 13:0645d8841f51 379
bogdanm 13:0645d8841f51 380 __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 381 return(result);
bogdanm 13:0645d8841f51 382 }
bogdanm 13:0645d8841f51 383
bogdanm 13:0645d8841f51 384 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 385 {
bogdanm 13:0645d8841f51 386 uint32_t result;
bogdanm 13:0645d8841f51 387
bogdanm 13:0645d8841f51 388 __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 389 return(result);
bogdanm 13:0645d8841f51 390 }
bogdanm 13:0645d8841f51 391
bogdanm 13:0645d8841f51 392 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 393 {
bogdanm 13:0645d8841f51 394 uint32_t result;
bogdanm 13:0645d8841f51 395
bogdanm 13:0645d8841f51 396 __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 397 return(result);
bogdanm 13:0645d8841f51 398 }
bogdanm 13:0645d8841f51 399
bogdanm 13:0645d8841f51 400 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 401 {
bogdanm 13:0645d8841f51 402 uint32_t result;
bogdanm 13:0645d8841f51 403
bogdanm 13:0645d8841f51 404 __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 405 return(result);
bogdanm 13:0645d8841f51 406 }
bogdanm 13:0645d8841f51 407
bogdanm 13:0645d8841f51 408 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 409 {
bogdanm 13:0645d8841f51 410 uint32_t result;
bogdanm 13:0645d8841f51 411
bogdanm 13:0645d8841f51 412 __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 413 return(result);
bogdanm 13:0645d8841f51 414 }
bogdanm 13:0645d8841f51 415
bogdanm 13:0645d8841f51 416 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 417 {
bogdanm 13:0645d8841f51 418 uint32_t result;
bogdanm 13:0645d8841f51 419
bogdanm 13:0645d8841f51 420 __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 421 return(result);
bogdanm 13:0645d8841f51 422 }
bogdanm 13:0645d8841f51 423
bogdanm 13:0645d8841f51 424 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 425 {
bogdanm 13:0645d8841f51 426 uint32_t result;
bogdanm 13:0645d8841f51 427
bogdanm 13:0645d8841f51 428 __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 429 return(result);
bogdanm 13:0645d8841f51 430 }
bogdanm 13:0645d8841f51 431
bogdanm 13:0645d8841f51 432 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 433 {
bogdanm 13:0645d8841f51 434 uint32_t result;
bogdanm 13:0645d8841f51 435
bogdanm 13:0645d8841f51 436 __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 437 return(result);
bogdanm 13:0645d8841f51 438 }
bogdanm 13:0645d8841f51 439
bogdanm 13:0645d8841f51 440 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 441 {
bogdanm 13:0645d8841f51 442 uint32_t result;
bogdanm 13:0645d8841f51 443
bogdanm 13:0645d8841f51 444 __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 445 return(result);
bogdanm 13:0645d8841f51 446 }
bogdanm 13:0645d8841f51 447
bogdanm 13:0645d8841f51 448 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 449 {
bogdanm 13:0645d8841f51 450 uint32_t result;
bogdanm 13:0645d8841f51 451
bogdanm 13:0645d8841f51 452 __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 453 return(result);
bogdanm 13:0645d8841f51 454 }
bogdanm 13:0645d8841f51 455
bogdanm 13:0645d8841f51 456 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
bogdanm 13:0645d8841f51 457 {
bogdanm 13:0645d8841f51 458 uint32_t result;
bogdanm 13:0645d8841f51 459
bogdanm 13:0645d8841f51 460 __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
bogdanm 13:0645d8841f51 461 return(result);
bogdanm 13:0645d8841f51 462 }
bogdanm 13:0645d8841f51 463
bogdanm 13:0645d8841f51 464 #define __SSAT16(ARG1,ARG2) \
bogdanm 13:0645d8841f51 465 ({ \
bogdanm 13:0645d8841f51 466 uint32_t __RES, __ARG1 = (ARG1); \
bogdanm 13:0645d8841f51 467 __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
bogdanm 13:0645d8841f51 468 __RES; \
bogdanm 13:0645d8841f51 469 })
bogdanm 13:0645d8841f51 470
bogdanm 13:0645d8841f51 471 #define __USAT16(ARG1,ARG2) \
bogdanm 13:0645d8841f51 472 ({ \
bogdanm 13:0645d8841f51 473 uint32_t __RES, __ARG1 = (ARG1); \
bogdanm 13:0645d8841f51 474 __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
bogdanm 13:0645d8841f51 475 __RES; \
bogdanm 13:0645d8841f51 476 })
bogdanm 13:0645d8841f51 477
bogdanm 13:0645d8841f51 478 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UXTB16(uint32_t op1)
bogdanm 13:0645d8841f51 479 {
bogdanm 13:0645d8841f51 480 uint32_t result;
bogdanm 13:0645d8841f51 481
bogdanm 13:0645d8841f51 482 __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
bogdanm 13:0645d8841f51 483 return(result);
bogdanm 13:0645d8841f51 484 }
bogdanm 13:0645d8841f51 485
bogdanm 13:0645d8841f51 486 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 487 {
bogdanm 13:0645d8841f51 488 uint32_t result;
bogdanm 13:0645d8841f51 489
bogdanm 13:0645d8841f51 490 __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 491 return(result);
bogdanm 13:0645d8841f51 492 }
bogdanm 13:0645d8841f51 493
bogdanm 13:0645d8841f51 494 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SXTB16(uint32_t op1)
bogdanm 13:0645d8841f51 495 {
bogdanm 13:0645d8841f51 496 uint32_t result;
bogdanm 13:0645d8841f51 497
bogdanm 13:0645d8841f51 498 __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
bogdanm 13:0645d8841f51 499 return(result);
bogdanm 13:0645d8841f51 500 }
bogdanm 13:0645d8841f51 501
bogdanm 13:0645d8841f51 502 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 503 {
bogdanm 13:0645d8841f51 504 uint32_t result;
bogdanm 13:0645d8841f51 505
bogdanm 13:0645d8841f51 506 __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 507 return(result);
bogdanm 13:0645d8841f51 508 }
bogdanm 13:0645d8841f51 509
bogdanm 13:0645d8841f51 510 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 511 {
bogdanm 13:0645d8841f51 512 uint32_t result;
bogdanm 13:0645d8841f51 513
bogdanm 13:0645d8841f51 514 __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 515 return(result);
bogdanm 13:0645d8841f51 516 }
bogdanm 13:0645d8841f51 517
bogdanm 13:0645d8841f51 518 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 519 {
bogdanm 13:0645d8841f51 520 uint32_t result;
bogdanm 13:0645d8841f51 521
bogdanm 13:0645d8841f51 522 __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 523 return(result);
bogdanm 13:0645d8841f51 524 }
bogdanm 13:0645d8841f51 525
bogdanm 13:0645d8841f51 526 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
bogdanm 13:0645d8841f51 527 {
bogdanm 13:0645d8841f51 528 uint32_t result;
bogdanm 13:0645d8841f51 529
bogdanm 13:0645d8841f51 530 __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
bogdanm 13:0645d8841f51 531 return(result);
bogdanm 13:0645d8841f51 532 }
bogdanm 13:0645d8841f51 533
bogdanm 13:0645d8841f51 534 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
bogdanm 13:0645d8841f51 535 {
bogdanm 13:0645d8841f51 536 uint32_t result;
bogdanm 13:0645d8841f51 537
bogdanm 13:0645d8841f51 538 __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
bogdanm 13:0645d8841f51 539 return(result);
bogdanm 13:0645d8841f51 540 }
bogdanm 13:0645d8841f51 541
bogdanm 13:0645d8841f51 542 #define __SMLALD(ARG1,ARG2,ARG3) \
bogdanm 13:0645d8841f51 543 ({ \
bogdanm 13:0645d8841f51 544 uint32_t __ARG1 = (ARG1), __ARG2 = (ARG2), __ARG3_H = (uint32_t)((uint64_t)(ARG3) >> 32), __ARG3_L = (uint32_t)((uint64_t)(ARG3) & 0xFFFFFFFFUL); \
bogdanm 13:0645d8841f51 545 __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (__ARG3_L), "=r" (__ARG3_H) : "r" (__ARG1), "r" (__ARG2), "0" (__ARG3_L), "1" (__ARG3_H) ); \
bogdanm 13:0645d8841f51 546 (uint64_t)(((uint64_t)__ARG3_H << 32) | __ARG3_L); \
bogdanm 13:0645d8841f51 547 })
bogdanm 13:0645d8841f51 548
bogdanm 13:0645d8841f51 549 #define __SMLALDX(ARG1,ARG2,ARG3) \
bogdanm 13:0645d8841f51 550 ({ \
bogdanm 13:0645d8841f51 551 uint32_t __ARG1 = (ARG1), __ARG2 = (ARG2), __ARG3_H = (uint32_t)((uint64_t)(ARG3) >> 32), __ARG3_L = (uint32_t)((uint64_t)(ARG3) & 0xFFFFFFFFUL); \
bogdanm 13:0645d8841f51 552 __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (__ARG3_L), "=r" (__ARG3_H) : "r" (__ARG1), "r" (__ARG2), "0" (__ARG3_L), "1" (__ARG3_H) ); \
bogdanm 13:0645d8841f51 553 (uint64_t)(((uint64_t)__ARG3_H << 32) | __ARG3_L); \
bogdanm 13:0645d8841f51 554 })
bogdanm 13:0645d8841f51 555
bogdanm 13:0645d8841f51 556 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 557 {
bogdanm 13:0645d8841f51 558 uint32_t result;
bogdanm 13:0645d8841f51 559
bogdanm 13:0645d8841f51 560 __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 561 return(result);
bogdanm 13:0645d8841f51 562 }
bogdanm 13:0645d8841f51 563
bogdanm 13:0645d8841f51 564 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 565 {
bogdanm 13:0645d8841f51 566 uint32_t result;
bogdanm 13:0645d8841f51 567
bogdanm 13:0645d8841f51 568 __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 569 return(result);
bogdanm 13:0645d8841f51 570 }
bogdanm 13:0645d8841f51 571
bogdanm 13:0645d8841f51 572 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
bogdanm 13:0645d8841f51 573 {
bogdanm 13:0645d8841f51 574 uint32_t result;
bogdanm 13:0645d8841f51 575
bogdanm 13:0645d8841f51 576 __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
bogdanm 13:0645d8841f51 577 return(result);
bogdanm 13:0645d8841f51 578 }
bogdanm 13:0645d8841f51 579
bogdanm 13:0645d8841f51 580 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
bogdanm 13:0645d8841f51 581 {
bogdanm 13:0645d8841f51 582 uint32_t result;
bogdanm 13:0645d8841f51 583
bogdanm 13:0645d8841f51 584 __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
bogdanm 13:0645d8841f51 585 return(result);
bogdanm 13:0645d8841f51 586 }
bogdanm 13:0645d8841f51 587
/* Dual 16-bit signed multiply-subtract with 64-bit accumulate: emits the
 * SMLSLD instruction via a GCC statement expression.
 * ARG1/ARG2 are packed halfword operands; ARG3 is the 64-bit accumulator,
 * split into low/high 32-bit halves for the instruction's RdLo/RdHi register
 * pair ("0"/"1" tie the inputs to the output registers) and recombined into
 * the uint64_t result.
 * Fix: cast ARG3 to uint64_t before ">> 32", matching __SMLALD/__SMLALDX —
 * without the cast, passing a 32-bit expression makes the shift undefined
 * behavior and silently drops the high accumulator half.
 * NOTE: ARG3 is expanded twice below — avoid side effects in ARG3.
 */
#define __SMLSLD(ARG1,ARG2,ARG3) \
({ \
  uint32_t __ARG1 = (ARG1), __ARG2 = (ARG2), __ARG3_H = (uint32_t)((uint64_t)(ARG3) >> 32), __ARG3_L = (uint32_t)((uint64_t)(ARG3) & 0xFFFFFFFFUL); \
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (__ARG3_L), "=r" (__ARG3_H) : "r" (__ARG1), "r" (__ARG2), "0" (__ARG3_L), "1" (__ARG3_H) ); \
  (uint64_t)(((uint64_t)__ARG3_H << 32) | __ARG3_L); \
})
bogdanm 13:0645d8841f51 594
/* Dual 16-bit signed multiply-subtract with exchange and 64-bit accumulate:
 * emits the SMLSLDX instruction via a GCC statement expression.
 * ARG1/ARG2 are packed halfword operands; ARG3 is the 64-bit accumulator,
 * split into low/high 32-bit halves for the instruction's RdLo/RdHi register
 * pair ("0"/"1" tie the inputs to the output registers) and recombined into
 * the uint64_t result.
 * Fix: cast ARG3 to uint64_t before ">> 32", matching __SMLALD/__SMLALDX —
 * without the cast, passing a 32-bit expression makes the shift undefined
 * behavior and silently drops the high accumulator half.
 * NOTE: ARG3 is expanded twice below — avoid side effects in ARG3.
 */
#define __SMLSLDX(ARG1,ARG2,ARG3) \
({ \
  uint32_t __ARG1 = (ARG1), __ARG2 = (ARG2), __ARG3_H = (uint32_t)((uint64_t)(ARG3) >> 32), __ARG3_L = (uint32_t)((uint64_t)(ARG3) & 0xFFFFFFFFUL); \
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (__ARG3_L), "=r" (__ARG3_H) : "r" (__ARG1), "r" (__ARG2), "0" (__ARG3_L), "1" (__ARG3_H) ); \
  (uint64_t)(((uint64_t)__ARG3_H << 32) | __ARG3_L); \
})
bogdanm 13:0645d8841f51 601
bogdanm 13:0645d8841f51 602 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 603 {
bogdanm 13:0645d8841f51 604 uint32_t result;
bogdanm 13:0645d8841f51 605
bogdanm 13:0645d8841f51 606 __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 607 return(result);
bogdanm 13:0645d8841f51 608 }
bogdanm 13:0645d8841f51 609
bogdanm 13:0645d8841f51 610 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 611 {
bogdanm 13:0645d8841f51 612 uint32_t result;
bogdanm 13:0645d8841f51 613
bogdanm 13:0645d8841f51 614 __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 615 return(result);
bogdanm 13:0645d8841f51 616 }
bogdanm 13:0645d8841f51 617
bogdanm 13:0645d8841f51 618 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB(uint32_t op1, uint32_t op2)
bogdanm 13:0645d8841f51 619 {
bogdanm 13:0645d8841f51 620 uint32_t result;
bogdanm 13:0645d8841f51 621
bogdanm 13:0645d8841f51 622 __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
bogdanm 13:0645d8841f51 623 return(result);
bogdanm 13:0645d8841f51 624 }
bogdanm 13:0645d8841f51 625
/* Pack halfword, bottom-top: emits PKHBT via a GCC statement expression.
 * Combines the bottom halfword of ARG1 with the (shifted) top halfword of
 * ARG2; ARG3 is the left-shift amount applied to ARG2 and must be a
 * compile-time constant (it feeds the "I" immediate asm constraint).
 * ARG1/ARG2 are copied into locals, so each is evaluated exactly once.
 */
#define __PKHBT(ARG1,ARG2,ARG3) \
({ \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
})
bogdanm 13:0645d8841f51 632
/* Pack halfword, top-bottom: emits PKHTB via a GCC statement expression.
 * Combines the top halfword of ARG1 with the (arithmetically shifted) bottom
 * halfword of ARG2; ARG3 is the right-shift amount and must be a compile-time
 * constant (it feeds the "I" immediate asm constraint). A zero shift selects
 * the no-shift encoding, since "asr #0" is not a valid PKHTB operand.
 * Fix: parenthesize ARG3 in the comparison — the unparenthesized expansion
 * of an expression argument (e.g. a conditional) parses incorrectly.
 * ARG1/ARG2 are copied into locals, so each is evaluated exactly once.
 */
#define __PKHTB(ARG1,ARG2,ARG3) \
({ \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if ((ARG3) == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
})
bogdanm 13:0645d8841f51 642
bogdanm 13:0645d8841f51 643 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
bogdanm 13:0645d8841f51 644 {
bogdanm 13:0645d8841f51 645 int32_t result;
bogdanm 13:0645d8841f51 646
bogdanm 13:0645d8841f51 647 __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
bogdanm 13:0645d8841f51 648 return(result);
bogdanm 13:0645d8841f51 649 }
bogdanm 13:0645d8841f51 650
bogdanm 13:0645d8841f51 651 /*-- End CM4 SIMD Intrinsics -----------------------------------------------------*/
bogdanm 13:0645d8841f51 652
bogdanm 13:0645d8841f51 653
bogdanm 13:0645d8841f51 654
bogdanm 13:0645d8841f51 655 #elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
bogdanm 13:0645d8841f51 656 /* TASKING carm specific functions */
bogdanm 13:0645d8841f51 657
bogdanm 13:0645d8841f51 658
bogdanm 13:0645d8841f51 659 /*------ CM4 SIMD Intrinsics -----------------------------------------------------*/
bogdanm 13:0645d8841f51 660 /* not yet supported */
bogdanm 13:0645d8841f51 661 /*-- End CM4 SIMD Intrinsics -----------------------------------------------------*/
bogdanm 13:0645d8841f51 662
bogdanm 13:0645d8841f51 663
bogdanm 13:0645d8841f51 664 #endif
bogdanm 13:0645d8841f51 665
bogdanm 13:0645d8841f51 666 /*@} end of group CMSIS_SIMD_intrinsics */
bogdanm 13:0645d8841f51 667
bogdanm 13:0645d8841f51 668
bogdanm 13:0645d8841f51 669 #endif /* __CORE_CM4_SIMD_H */
bogdanm 13:0645d8841f51 670
bogdanm 13:0645d8841f51 671 #ifdef __cplusplus
bogdanm 13:0645d8841f51 672 }
bogdanm 13:0645d8841f51 673 #endif