Martin Johnson / Space_Invaders_Demo

Dependencies:   STM32F3-Discovery

Committer:
MartinJohnson
Date:
Tue Mar 01 02:40:19 2016 +0000
Revision:
0:404dae88af71
space invaders game

Who changed what in which revision?

User | Revision | Line number | New contents of line
/**************************************************************************//**
 * @file     core_cmSimd.h
 * @brief    CMSIS Cortex-M SIMD Header File
 * @version  V4.00
 * @date     22. August 2014
 *
 * @note
 *
 ******************************************************************************/
/* Copyright (c) 2009 - 2014 ARM LIMITED

   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.
   *
   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/
MartinJohnson 0:404dae88af71 38 #if defined ( __ICCARM__ )
MartinJohnson 0:404dae88af71 39 #pragma system_include /* treat file as system include file for MISRA check */
MartinJohnson 0:404dae88af71 40 #endif
MartinJohnson 0:404dae88af71 41
MartinJohnson 0:404dae88af71 42 #ifndef __CORE_CMSIMD_H
MartinJohnson 0:404dae88af71 43 #define __CORE_CMSIMD_H
MartinJohnson 0:404dae88af71 44
MartinJohnson 0:404dae88af71 45 #ifdef __cplusplus
MartinJohnson 0:404dae88af71 46 extern "C" {
MartinJohnson 0:404dae88af71 47 #endif
MartinJohnson 0:404dae88af71 48
MartinJohnson 0:404dae88af71 49
MartinJohnson 0:404dae88af71 50 /*******************************************************************************
MartinJohnson 0:404dae88af71 51 * Hardware Abstraction Layer
MartinJohnson 0:404dae88af71 52 ******************************************************************************/
MartinJohnson 0:404dae88af71 53
MartinJohnson 0:404dae88af71 54
MartinJohnson 0:404dae88af71 55 /* ################### Compiler specific Intrinsics ########################### */
MartinJohnson 0:404dae88af71 56 /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
MartinJohnson 0:404dae88af71 57 Access to dedicated SIMD instructions
MartinJohnson 0:404dae88af71 58 @{
MartinJohnson 0:404dae88af71 59 */
MartinJohnson 0:404dae88af71 60
MartinJohnson 0:404dae88af71 61 #if defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
MartinJohnson 0:404dae88af71 62 /* ARM armcc specific functions */
MartinJohnson 0:404dae88af71 63 #define __SADD8 __sadd8
MartinJohnson 0:404dae88af71 64 #define __QADD8 __qadd8
MartinJohnson 0:404dae88af71 65 #define __SHADD8 __shadd8
MartinJohnson 0:404dae88af71 66 #define __UADD8 __uadd8
MartinJohnson 0:404dae88af71 67 #define __UQADD8 __uqadd8
MartinJohnson 0:404dae88af71 68 #define __UHADD8 __uhadd8
MartinJohnson 0:404dae88af71 69 #define __SSUB8 __ssub8
MartinJohnson 0:404dae88af71 70 #define __QSUB8 __qsub8
MartinJohnson 0:404dae88af71 71 #define __SHSUB8 __shsub8
MartinJohnson 0:404dae88af71 72 #define __USUB8 __usub8
MartinJohnson 0:404dae88af71 73 #define __UQSUB8 __uqsub8
MartinJohnson 0:404dae88af71 74 #define __UHSUB8 __uhsub8
MartinJohnson 0:404dae88af71 75 #define __SADD16 __sadd16
MartinJohnson 0:404dae88af71 76 #define __QADD16 __qadd16
MartinJohnson 0:404dae88af71 77 #define __SHADD16 __shadd16
MartinJohnson 0:404dae88af71 78 #define __UADD16 __uadd16
MartinJohnson 0:404dae88af71 79 #define __UQADD16 __uqadd16
MartinJohnson 0:404dae88af71 80 #define __UHADD16 __uhadd16
MartinJohnson 0:404dae88af71 81 #define __SSUB16 __ssub16
MartinJohnson 0:404dae88af71 82 #define __QSUB16 __qsub16
MartinJohnson 0:404dae88af71 83 #define __SHSUB16 __shsub16
MartinJohnson 0:404dae88af71 84 #define __USUB16 __usub16
MartinJohnson 0:404dae88af71 85 #define __UQSUB16 __uqsub16
MartinJohnson 0:404dae88af71 86 #define __UHSUB16 __uhsub16
MartinJohnson 0:404dae88af71 87 #define __SASX __sasx
MartinJohnson 0:404dae88af71 88 #define __QASX __qasx
MartinJohnson 0:404dae88af71 89 #define __SHASX __shasx
MartinJohnson 0:404dae88af71 90 #define __UASX __uasx
MartinJohnson 0:404dae88af71 91 #define __UQASX __uqasx
MartinJohnson 0:404dae88af71 92 #define __UHASX __uhasx
MartinJohnson 0:404dae88af71 93 #define __SSAX __ssax
MartinJohnson 0:404dae88af71 94 #define __QSAX __qsax
MartinJohnson 0:404dae88af71 95 #define __SHSAX __shsax
MartinJohnson 0:404dae88af71 96 #define __USAX __usax
MartinJohnson 0:404dae88af71 97 #define __UQSAX __uqsax
MartinJohnson 0:404dae88af71 98 #define __UHSAX __uhsax
MartinJohnson 0:404dae88af71 99 #define __USAD8 __usad8
MartinJohnson 0:404dae88af71 100 #define __USADA8 __usada8
MartinJohnson 0:404dae88af71 101 #define __SSAT16 __ssat16
MartinJohnson 0:404dae88af71 102 #define __USAT16 __usat16
MartinJohnson 0:404dae88af71 103 #define __UXTB16 __uxtb16
MartinJohnson 0:404dae88af71 104 #define __UXTAB16 __uxtab16
MartinJohnson 0:404dae88af71 105 #define __SXTB16 __sxtb16
MartinJohnson 0:404dae88af71 106 #define __SXTAB16 __sxtab16
MartinJohnson 0:404dae88af71 107 #define __SMUAD __smuad
MartinJohnson 0:404dae88af71 108 #define __SMUADX __smuadx
MartinJohnson 0:404dae88af71 109 #define __SMLAD __smlad
MartinJohnson 0:404dae88af71 110 #define __SMLADX __smladx
MartinJohnson 0:404dae88af71 111 #define __SMLALD __smlald
MartinJohnson 0:404dae88af71 112 #define __SMLALDX __smlaldx
MartinJohnson 0:404dae88af71 113 #define __SMUSD __smusd
MartinJohnson 0:404dae88af71 114 #define __SMUSDX __smusdx
MartinJohnson 0:404dae88af71 115 #define __SMLSD __smlsd
MartinJohnson 0:404dae88af71 116 #define __SMLSDX __smlsdx
MartinJohnson 0:404dae88af71 117 #define __SMLSLD __smlsld
MartinJohnson 0:404dae88af71 118 #define __SMLSLDX __smlsldx
MartinJohnson 0:404dae88af71 119 #define __SEL __sel
MartinJohnson 0:404dae88af71 120 #define __QADD __qadd
MartinJohnson 0:404dae88af71 121 #define __QSUB __qsub
MartinJohnson 0:404dae88af71 122
MartinJohnson 0:404dae88af71 123 #define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \
MartinJohnson 0:404dae88af71 124 ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) )
MartinJohnson 0:404dae88af71 125
MartinJohnson 0:404dae88af71 126 #define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \
MartinJohnson 0:404dae88af71 127 ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) )
MartinJohnson 0:404dae88af71 128
MartinJohnson 0:404dae88af71 129 #define __SMMLA(ARG1,ARG2,ARG3) ( (int32_t)((((int64_t)(ARG1) * (ARG2)) + \
MartinJohnson 0:404dae88af71 130 ((int64_t)(ARG3) << 32) ) >> 32))
MartinJohnson 0:404dae88af71 131
MartinJohnson 0:404dae88af71 132
MartinJohnson 0:404dae88af71 133 #elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
MartinJohnson 0:404dae88af71 134 /* GNU gcc specific functions */
MartinJohnson 0:404dae88af71 135 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 136 {
MartinJohnson 0:404dae88af71 137 uint32_t result;
MartinJohnson 0:404dae88af71 138
MartinJohnson 0:404dae88af71 139 __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 140 return(result);
MartinJohnson 0:404dae88af71 141 }
MartinJohnson 0:404dae88af71 142
MartinJohnson 0:404dae88af71 143 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 144 {
MartinJohnson 0:404dae88af71 145 uint32_t result;
MartinJohnson 0:404dae88af71 146
MartinJohnson 0:404dae88af71 147 __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 148 return(result);
MartinJohnson 0:404dae88af71 149 }
MartinJohnson 0:404dae88af71 150
MartinJohnson 0:404dae88af71 151 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 152 {
MartinJohnson 0:404dae88af71 153 uint32_t result;
MartinJohnson 0:404dae88af71 154
MartinJohnson 0:404dae88af71 155 __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 156 return(result);
MartinJohnson 0:404dae88af71 157 }
MartinJohnson 0:404dae88af71 158
MartinJohnson 0:404dae88af71 159 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 160 {
MartinJohnson 0:404dae88af71 161 uint32_t result;
MartinJohnson 0:404dae88af71 162
MartinJohnson 0:404dae88af71 163 __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 164 return(result);
MartinJohnson 0:404dae88af71 165 }
MartinJohnson 0:404dae88af71 166
MartinJohnson 0:404dae88af71 167 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 168 {
MartinJohnson 0:404dae88af71 169 uint32_t result;
MartinJohnson 0:404dae88af71 170
MartinJohnson 0:404dae88af71 171 __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 172 return(result);
MartinJohnson 0:404dae88af71 173 }
MartinJohnson 0:404dae88af71 174
MartinJohnson 0:404dae88af71 175 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 176 {
MartinJohnson 0:404dae88af71 177 uint32_t result;
MartinJohnson 0:404dae88af71 178
MartinJohnson 0:404dae88af71 179 __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 180 return(result);
MartinJohnson 0:404dae88af71 181 }
MartinJohnson 0:404dae88af71 182
MartinJohnson 0:404dae88af71 183
MartinJohnson 0:404dae88af71 184 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 185 {
MartinJohnson 0:404dae88af71 186 uint32_t result;
MartinJohnson 0:404dae88af71 187
MartinJohnson 0:404dae88af71 188 __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 189 return(result);
MartinJohnson 0:404dae88af71 190 }
MartinJohnson 0:404dae88af71 191
MartinJohnson 0:404dae88af71 192 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 193 {
MartinJohnson 0:404dae88af71 194 uint32_t result;
MartinJohnson 0:404dae88af71 195
MartinJohnson 0:404dae88af71 196 __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 197 return(result);
MartinJohnson 0:404dae88af71 198 }
MartinJohnson 0:404dae88af71 199
MartinJohnson 0:404dae88af71 200 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 201 {
MartinJohnson 0:404dae88af71 202 uint32_t result;
MartinJohnson 0:404dae88af71 203
MartinJohnson 0:404dae88af71 204 __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 205 return(result);
MartinJohnson 0:404dae88af71 206 }
MartinJohnson 0:404dae88af71 207
MartinJohnson 0:404dae88af71 208 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 209 {
MartinJohnson 0:404dae88af71 210 uint32_t result;
MartinJohnson 0:404dae88af71 211
MartinJohnson 0:404dae88af71 212 __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 213 return(result);
MartinJohnson 0:404dae88af71 214 }
MartinJohnson 0:404dae88af71 215
MartinJohnson 0:404dae88af71 216 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 217 {
MartinJohnson 0:404dae88af71 218 uint32_t result;
MartinJohnson 0:404dae88af71 219
MartinJohnson 0:404dae88af71 220 __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 221 return(result);
MartinJohnson 0:404dae88af71 222 }
MartinJohnson 0:404dae88af71 223
MartinJohnson 0:404dae88af71 224 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 225 {
MartinJohnson 0:404dae88af71 226 uint32_t result;
MartinJohnson 0:404dae88af71 227
MartinJohnson 0:404dae88af71 228 __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 229 return(result);
MartinJohnson 0:404dae88af71 230 }
MartinJohnson 0:404dae88af71 231
MartinJohnson 0:404dae88af71 232
MartinJohnson 0:404dae88af71 233 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 234 {
MartinJohnson 0:404dae88af71 235 uint32_t result;
MartinJohnson 0:404dae88af71 236
MartinJohnson 0:404dae88af71 237 __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 238 return(result);
MartinJohnson 0:404dae88af71 239 }
MartinJohnson 0:404dae88af71 240
MartinJohnson 0:404dae88af71 241 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 242 {
MartinJohnson 0:404dae88af71 243 uint32_t result;
MartinJohnson 0:404dae88af71 244
MartinJohnson 0:404dae88af71 245 __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 246 return(result);
MartinJohnson 0:404dae88af71 247 }
MartinJohnson 0:404dae88af71 248
MartinJohnson 0:404dae88af71 249 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 250 {
MartinJohnson 0:404dae88af71 251 uint32_t result;
MartinJohnson 0:404dae88af71 252
MartinJohnson 0:404dae88af71 253 __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 254 return(result);
MartinJohnson 0:404dae88af71 255 }
MartinJohnson 0:404dae88af71 256
MartinJohnson 0:404dae88af71 257 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 258 {
MartinJohnson 0:404dae88af71 259 uint32_t result;
MartinJohnson 0:404dae88af71 260
MartinJohnson 0:404dae88af71 261 __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 262 return(result);
MartinJohnson 0:404dae88af71 263 }
MartinJohnson 0:404dae88af71 264
MartinJohnson 0:404dae88af71 265 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 266 {
MartinJohnson 0:404dae88af71 267 uint32_t result;
MartinJohnson 0:404dae88af71 268
MartinJohnson 0:404dae88af71 269 __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 270 return(result);
MartinJohnson 0:404dae88af71 271 }
MartinJohnson 0:404dae88af71 272
MartinJohnson 0:404dae88af71 273 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 274 {
MartinJohnson 0:404dae88af71 275 uint32_t result;
MartinJohnson 0:404dae88af71 276
MartinJohnson 0:404dae88af71 277 __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 278 return(result);
MartinJohnson 0:404dae88af71 279 }
MartinJohnson 0:404dae88af71 280
MartinJohnson 0:404dae88af71 281 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 282 {
MartinJohnson 0:404dae88af71 283 uint32_t result;
MartinJohnson 0:404dae88af71 284
MartinJohnson 0:404dae88af71 285 __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 286 return(result);
MartinJohnson 0:404dae88af71 287 }
MartinJohnson 0:404dae88af71 288
MartinJohnson 0:404dae88af71 289 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 290 {
MartinJohnson 0:404dae88af71 291 uint32_t result;
MartinJohnson 0:404dae88af71 292
MartinJohnson 0:404dae88af71 293 __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 294 return(result);
MartinJohnson 0:404dae88af71 295 }
MartinJohnson 0:404dae88af71 296
MartinJohnson 0:404dae88af71 297 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 298 {
MartinJohnson 0:404dae88af71 299 uint32_t result;
MartinJohnson 0:404dae88af71 300
MartinJohnson 0:404dae88af71 301 __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 302 return(result);
MartinJohnson 0:404dae88af71 303 }
MartinJohnson 0:404dae88af71 304
MartinJohnson 0:404dae88af71 305 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 306 {
MartinJohnson 0:404dae88af71 307 uint32_t result;
MartinJohnson 0:404dae88af71 308
MartinJohnson 0:404dae88af71 309 __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 310 return(result);
MartinJohnson 0:404dae88af71 311 }
MartinJohnson 0:404dae88af71 312
MartinJohnson 0:404dae88af71 313 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 314 {
MartinJohnson 0:404dae88af71 315 uint32_t result;
MartinJohnson 0:404dae88af71 316
MartinJohnson 0:404dae88af71 317 __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 318 return(result);
MartinJohnson 0:404dae88af71 319 }
MartinJohnson 0:404dae88af71 320
MartinJohnson 0:404dae88af71 321 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 322 {
MartinJohnson 0:404dae88af71 323 uint32_t result;
MartinJohnson 0:404dae88af71 324
MartinJohnson 0:404dae88af71 325 __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 326 return(result);
MartinJohnson 0:404dae88af71 327 }
MartinJohnson 0:404dae88af71 328
MartinJohnson 0:404dae88af71 329 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 330 {
MartinJohnson 0:404dae88af71 331 uint32_t result;
MartinJohnson 0:404dae88af71 332
MartinJohnson 0:404dae88af71 333 __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 334 return(result);
MartinJohnson 0:404dae88af71 335 }
MartinJohnson 0:404dae88af71 336
MartinJohnson 0:404dae88af71 337 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 338 {
MartinJohnson 0:404dae88af71 339 uint32_t result;
MartinJohnson 0:404dae88af71 340
MartinJohnson 0:404dae88af71 341 __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 342 return(result);
MartinJohnson 0:404dae88af71 343 }
MartinJohnson 0:404dae88af71 344
MartinJohnson 0:404dae88af71 345 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 346 {
MartinJohnson 0:404dae88af71 347 uint32_t result;
MartinJohnson 0:404dae88af71 348
MartinJohnson 0:404dae88af71 349 __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 350 return(result);
MartinJohnson 0:404dae88af71 351 }
MartinJohnson 0:404dae88af71 352
MartinJohnson 0:404dae88af71 353 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 354 {
MartinJohnson 0:404dae88af71 355 uint32_t result;
MartinJohnson 0:404dae88af71 356
MartinJohnson 0:404dae88af71 357 __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 358 return(result);
MartinJohnson 0:404dae88af71 359 }
MartinJohnson 0:404dae88af71 360
MartinJohnson 0:404dae88af71 361 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 362 {
MartinJohnson 0:404dae88af71 363 uint32_t result;
MartinJohnson 0:404dae88af71 364
MartinJohnson 0:404dae88af71 365 __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 366 return(result);
MartinJohnson 0:404dae88af71 367 }
MartinJohnson 0:404dae88af71 368
MartinJohnson 0:404dae88af71 369 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 370 {
MartinJohnson 0:404dae88af71 371 uint32_t result;
MartinJohnson 0:404dae88af71 372
MartinJohnson 0:404dae88af71 373 __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 374 return(result);
MartinJohnson 0:404dae88af71 375 }
MartinJohnson 0:404dae88af71 376
MartinJohnson 0:404dae88af71 377 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 378 {
MartinJohnson 0:404dae88af71 379 uint32_t result;
MartinJohnson 0:404dae88af71 380
MartinJohnson 0:404dae88af71 381 __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 382 return(result);
MartinJohnson 0:404dae88af71 383 }
MartinJohnson 0:404dae88af71 384
MartinJohnson 0:404dae88af71 385 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 386 {
MartinJohnson 0:404dae88af71 387 uint32_t result;
MartinJohnson 0:404dae88af71 388
MartinJohnson 0:404dae88af71 389 __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 390 return(result);
MartinJohnson 0:404dae88af71 391 }
MartinJohnson 0:404dae88af71 392
MartinJohnson 0:404dae88af71 393 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 394 {
MartinJohnson 0:404dae88af71 395 uint32_t result;
MartinJohnson 0:404dae88af71 396
MartinJohnson 0:404dae88af71 397 __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 398 return(result);
MartinJohnson 0:404dae88af71 399 }
MartinJohnson 0:404dae88af71 400
MartinJohnson 0:404dae88af71 401 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 402 {
MartinJohnson 0:404dae88af71 403 uint32_t result;
MartinJohnson 0:404dae88af71 404
MartinJohnson 0:404dae88af71 405 __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 406 return(result);
MartinJohnson 0:404dae88af71 407 }
MartinJohnson 0:404dae88af71 408
MartinJohnson 0:404dae88af71 409 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 410 {
MartinJohnson 0:404dae88af71 411 uint32_t result;
MartinJohnson 0:404dae88af71 412
MartinJohnson 0:404dae88af71 413 __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 414 return(result);
MartinJohnson 0:404dae88af71 415 }
MartinJohnson 0:404dae88af71 416
MartinJohnson 0:404dae88af71 417 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 418 {
MartinJohnson 0:404dae88af71 419 uint32_t result;
MartinJohnson 0:404dae88af71 420
MartinJohnson 0:404dae88af71 421 __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 422 return(result);
MartinJohnson 0:404dae88af71 423 }
MartinJohnson 0:404dae88af71 424
MartinJohnson 0:404dae88af71 425 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 426 {
MartinJohnson 0:404dae88af71 427 uint32_t result;
MartinJohnson 0:404dae88af71 428
MartinJohnson 0:404dae88af71 429 __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 430 return(result);
MartinJohnson 0:404dae88af71 431 }
MartinJohnson 0:404dae88af71 432
MartinJohnson 0:404dae88af71 433 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
MartinJohnson 0:404dae88af71 434 {
MartinJohnson 0:404dae88af71 435 uint32_t result;
MartinJohnson 0:404dae88af71 436
MartinJohnson 0:404dae88af71 437 __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
MartinJohnson 0:404dae88af71 438 return(result);
MartinJohnson 0:404dae88af71 439 }
MartinJohnson 0:404dae88af71 440
MartinJohnson 0:404dae88af71 441 #define __SSAT16(ARG1,ARG2) \
MartinJohnson 0:404dae88af71 442 ({ \
MartinJohnson 0:404dae88af71 443 uint32_t __RES, __ARG1 = (ARG1); \
MartinJohnson 0:404dae88af71 444 __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
MartinJohnson 0:404dae88af71 445 __RES; \
MartinJohnson 0:404dae88af71 446 })
MartinJohnson 0:404dae88af71 447
MartinJohnson 0:404dae88af71 448 #define __USAT16(ARG1,ARG2) \
MartinJohnson 0:404dae88af71 449 ({ \
MartinJohnson 0:404dae88af71 450 uint32_t __RES, __ARG1 = (ARG1); \
MartinJohnson 0:404dae88af71 451 __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
MartinJohnson 0:404dae88af71 452 __RES; \
MartinJohnson 0:404dae88af71 453 })
MartinJohnson 0:404dae88af71 454
MartinJohnson 0:404dae88af71 455 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UXTB16(uint32_t op1)
MartinJohnson 0:404dae88af71 456 {
MartinJohnson 0:404dae88af71 457 uint32_t result;
MartinJohnson 0:404dae88af71 458
MartinJohnson 0:404dae88af71 459 __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
MartinJohnson 0:404dae88af71 460 return(result);
MartinJohnson 0:404dae88af71 461 }
MartinJohnson 0:404dae88af71 462
MartinJohnson 0:404dae88af71 463 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 464 {
MartinJohnson 0:404dae88af71 465 uint32_t result;
MartinJohnson 0:404dae88af71 466
MartinJohnson 0:404dae88af71 467 __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 468 return(result);
MartinJohnson 0:404dae88af71 469 }
MartinJohnson 0:404dae88af71 470
MartinJohnson 0:404dae88af71 471 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SXTB16(uint32_t op1)
MartinJohnson 0:404dae88af71 472 {
MartinJohnson 0:404dae88af71 473 uint32_t result;
MartinJohnson 0:404dae88af71 474
MartinJohnson 0:404dae88af71 475 __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
MartinJohnson 0:404dae88af71 476 return(result);
MartinJohnson 0:404dae88af71 477 }
MartinJohnson 0:404dae88af71 478
MartinJohnson 0:404dae88af71 479 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 480 {
MartinJohnson 0:404dae88af71 481 uint32_t result;
MartinJohnson 0:404dae88af71 482
MartinJohnson 0:404dae88af71 483 __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 484 return(result);
MartinJohnson 0:404dae88af71 485 }
MartinJohnson 0:404dae88af71 486
MartinJohnson 0:404dae88af71 487 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 488 {
MartinJohnson 0:404dae88af71 489 uint32_t result;
MartinJohnson 0:404dae88af71 490
MartinJohnson 0:404dae88af71 491 __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 492 return(result);
MartinJohnson 0:404dae88af71 493 }
MartinJohnson 0:404dae88af71 494
MartinJohnson 0:404dae88af71 495 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 496 {
MartinJohnson 0:404dae88af71 497 uint32_t result;
MartinJohnson 0:404dae88af71 498
MartinJohnson 0:404dae88af71 499 __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 500 return(result);
MartinJohnson 0:404dae88af71 501 }
MartinJohnson 0:404dae88af71 502
MartinJohnson 0:404dae88af71 503 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
MartinJohnson 0:404dae88af71 504 {
MartinJohnson 0:404dae88af71 505 uint32_t result;
MartinJohnson 0:404dae88af71 506
MartinJohnson 0:404dae88af71 507 __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
MartinJohnson 0:404dae88af71 508 return(result);
MartinJohnson 0:404dae88af71 509 }
MartinJohnson 0:404dae88af71 510
MartinJohnson 0:404dae88af71 511 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
MartinJohnson 0:404dae88af71 512 {
MartinJohnson 0:404dae88af71 513 uint32_t result;
MartinJohnson 0:404dae88af71 514
MartinJohnson 0:404dae88af71 515 __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
MartinJohnson 0:404dae88af71 516 return(result);
MartinJohnson 0:404dae88af71 517 }
MartinJohnson 0:404dae88af71 518
/** \brief  Dual 16-bit signed multiply with 64-bit accumulate
            (ARM SMLALD instruction).
    \param [in]  op1  two packed signed 16-bit operands
    \param [in]  op2  two packed signed 16-bit operands
    \param [in]  acc  64-bit accumulator added to both products
    \return 64-bit accumulated result produced by the smlald instruction
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* smlald takes the 64-bit accumulator as a register pair (RdLo, RdHi),
     so the union splits the 64-bit value into two 32-bit asm operands. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__ /* Little endian: w32[0] is the low word (RdLo), w32[1] the high word */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else /* Big endian: word order inside the union is reversed, so swap the operand mapping */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
MartinJohnson 0:404dae88af71 535
/** \brief  Dual 16-bit signed multiply with exchanged halfwords and 64-bit
            accumulate (ARM SMLALDX instruction).
    \param [in]  op1  two packed signed 16-bit operands
    \param [in]  op2  two packed signed 16-bit operands
    \param [in]  acc  64-bit accumulator added to both cross products
    \return 64-bit accumulated result produced by the smlaldx instruction
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* The 64-bit accumulator is passed to the instruction as a (RdLo, RdHi)
     register pair via the two 32-bit words of this union. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__ /* Little endian: w32[0] is the low word (RdLo) */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else /* Big endian: swap the low/high word mapping */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
MartinJohnson 0:404dae88af71 552
MartinJohnson 0:404dae88af71 553 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 554 {
MartinJohnson 0:404dae88af71 555 uint32_t result;
MartinJohnson 0:404dae88af71 556
MartinJohnson 0:404dae88af71 557 __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 558 return(result);
MartinJohnson 0:404dae88af71 559 }
MartinJohnson 0:404dae88af71 560
MartinJohnson 0:404dae88af71 561 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 562 {
MartinJohnson 0:404dae88af71 563 uint32_t result;
MartinJohnson 0:404dae88af71 564
MartinJohnson 0:404dae88af71 565 __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 566 return(result);
MartinJohnson 0:404dae88af71 567 }
MartinJohnson 0:404dae88af71 568
MartinJohnson 0:404dae88af71 569 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
MartinJohnson 0:404dae88af71 570 {
MartinJohnson 0:404dae88af71 571 uint32_t result;
MartinJohnson 0:404dae88af71 572
MartinJohnson 0:404dae88af71 573 __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
MartinJohnson 0:404dae88af71 574 return(result);
MartinJohnson 0:404dae88af71 575 }
MartinJohnson 0:404dae88af71 576
MartinJohnson 0:404dae88af71 577 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
MartinJohnson 0:404dae88af71 578 {
MartinJohnson 0:404dae88af71 579 uint32_t result;
MartinJohnson 0:404dae88af71 580
MartinJohnson 0:404dae88af71 581 __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
MartinJohnson 0:404dae88af71 582 return(result);
MartinJohnson 0:404dae88af71 583 }
MartinJohnson 0:404dae88af71 584
/** \brief  Dual 16-bit signed multiply-subtract with 64-bit accumulate
            (ARM SMLSLD instruction).
    \param [in]  op1  two packed signed 16-bit operands
    \param [in]  op2  two packed signed 16-bit operands
    \param [in]  acc  64-bit accumulator added to the product difference
    \return 64-bit accumulated result produced by the smlsld instruction
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* smlsld takes the 64-bit accumulator as a register pair (RdLo, RdHi),
     so the union splits the 64-bit value into two 32-bit asm operands. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__ /* Little endian: w32[0] is the low word (RdLo) */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else /* Big endian: swap the low/high word mapping */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
MartinJohnson 0:404dae88af71 601
/** \brief  Dual 16-bit signed multiply-subtract with exchanged halfwords and
            64-bit accumulate (ARM SMLSLDX instruction).
    \param [in]  op1  two packed signed 16-bit operands
    \param [in]  op2  two packed signed 16-bit operands
    \param [in]  acc  64-bit accumulator added to the cross-product difference
    \return 64-bit accumulated result produced by the smlsldx instruction
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* The 64-bit accumulator is passed to the instruction as a (RdLo, RdHi)
     register pair via the two 32-bit words of this union. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__ /* Little endian: w32[0] is the low word (RdLo) */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else /* Big endian: swap the low/high word mapping */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
MartinJohnson 0:404dae88af71 618
MartinJohnson 0:404dae88af71 619 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 620 {
MartinJohnson 0:404dae88af71 621 uint32_t result;
MartinJohnson 0:404dae88af71 622
MartinJohnson 0:404dae88af71 623 __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 624 return(result);
MartinJohnson 0:404dae88af71 625 }
MartinJohnson 0:404dae88af71 626
MartinJohnson 0:404dae88af71 627 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 628 {
MartinJohnson 0:404dae88af71 629 uint32_t result;
MartinJohnson 0:404dae88af71 630
MartinJohnson 0:404dae88af71 631 __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 632 return(result);
MartinJohnson 0:404dae88af71 633 }
MartinJohnson 0:404dae88af71 634
MartinJohnson 0:404dae88af71 635 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB(uint32_t op1, uint32_t op2)
MartinJohnson 0:404dae88af71 636 {
MartinJohnson 0:404dae88af71 637 uint32_t result;
MartinJohnson 0:404dae88af71 638
MartinJohnson 0:404dae88af71 639 __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
MartinJohnson 0:404dae88af71 640 return(result);
MartinJohnson 0:404dae88af71 641 }
MartinJohnson 0:404dae88af71 642
/** \brief  Pack Halfword Bottom-Top (ARM PKHBT instruction).
    Combines the bottom halfword of ARG1 with the bottom halfword of ARG2
    shifted left by ARG3. ARG3 must be a compile-time constant ("I" asm
    constraint); implemented as a macro for that reason.
    NOTE(review): a statement expression ({ ... }) is a GNU extension.
 */
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
MartinJohnson 0:404dae88af71 649
/** \brief  Pack Halfword Top-Bottom (ARM PKHTB instruction).
    Combines the top halfword of ARG1 with the top halfword of ARG2 shifted
    arithmetically right by ARG3. ARG3 must be a compile-time constant
    ("I" asm constraint); implemented as a macro for that reason.
    The ARG3 == 0 case is emitted without a shift because "asr #0" is not an
    encodable PKHTB shift amount.
 */
#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if ((ARG3) == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
MartinJohnson 0:404dae88af71 659
MartinJohnson 0:404dae88af71 660 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
MartinJohnson 0:404dae88af71 661 {
MartinJohnson 0:404dae88af71 662 int32_t result;
MartinJohnson 0:404dae88af71 663
MartinJohnson 0:404dae88af71 664 __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
MartinJohnson 0:404dae88af71 665 return(result);
MartinJohnson 0:404dae88af71 666 }
MartinJohnson 0:404dae88af71 667
MartinJohnson 0:404dae88af71 668
MartinJohnson 0:404dae88af71 669 #elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
MartinJohnson 0:404dae88af71 670 /* IAR iccarm specific functions */
MartinJohnson 0:404dae88af71 671 #include <cmsis_iar.h>
MartinJohnson 0:404dae88af71 672
MartinJohnson 0:404dae88af71 673
MartinJohnson 0:404dae88af71 674 #elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
MartinJohnson 0:404dae88af71 675 /* TI CCS specific functions */
MartinJohnson 0:404dae88af71 676 #include <cmsis_ccs.h>
MartinJohnson 0:404dae88af71 677
MartinJohnson 0:404dae88af71 678
MartinJohnson 0:404dae88af71 679 #elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
MartinJohnson 0:404dae88af71 680 /* TASKING carm specific functions */
MartinJohnson 0:404dae88af71 681 /* not yet supported */
MartinJohnson 0:404dae88af71 682
MartinJohnson 0:404dae88af71 683
MartinJohnson 0:404dae88af71 684 #elif defined ( __CSMC__ ) /*------------------ COSMIC Compiler -------------------*/
MartinJohnson 0:404dae88af71 685 /* Cosmic specific functions */
MartinJohnson 0:404dae88af71 686 #include <cmsis_csm.h>
MartinJohnson 0:404dae88af71 687
MartinJohnson 0:404dae88af71 688 #endif
MartinJohnson 0:404dae88af71 689
MartinJohnson 0:404dae88af71 690 /*@} end of group CMSIS_SIMD_intrinsics */
MartinJohnson 0:404dae88af71 691
MartinJohnson 0:404dae88af71 692
MartinJohnson 0:404dae88af71 693 #ifdef __cplusplus
MartinJohnson 0:404dae88af71 694 }
MartinJohnson 0:404dae88af71 695 #endif
MartinJohnson 0:404dae88af71 696
MartinJohnson 0:404dae88af71 697 #endif /* __CORE_CMSIMD_H */