Robert Lopez / CMSIS5
Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers cmsis_armclang.h Source File

cmsis_armclang.h

Go to the documentation of this file.
00001 /**************************************************************************//**
00002  * @file     cmsis_armclang.h
00003  * @brief    CMSIS compiler armclang (Arm Compiler 6) header file
00004  * @version  V5.0.4
00005  * @date     10. January 2018
00006  ******************************************************************************/
00007 /*
00008  * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
00009  *
00010  * SPDX-License-Identifier: Apache-2.0
00011  *
00012  * Licensed under the Apache License, Version 2.0 (the License); you may
00013  * not use this file except in compliance with the License.
00014  * You may obtain a copy of the License at
00015  *
00016  * www.apache.org/licenses/LICENSE-2.0
00017  *
00018  * Unless required by applicable law or agreed to in writing, software
00019  * distributed under the License is distributed on an AS IS BASIS, WITHOUT
00020  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00021  * See the License for the specific language governing permissions and
00022  * limitations under the License.
00023  */
00024 
00025 /*lint -esym(9058, IRQn)*/ /* disable MISRA 2012 Rule 2.4 for IRQn */
00026 
00027 #ifndef __CMSIS_ARMCLANG_H
00028 #define __CMSIS_ARMCLANG_H
00029 
00030 #pragma clang system_header   /* treat file as system include file */
00031 
00032 #ifndef __ARM_COMPAT_H
00033 #include <arm_compat.h>    /* Compatibility header for Arm Compiler 5 intrinsics */
00034 #endif
00035 
/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               __inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static __inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static __inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
#endif
#ifndef   __UNALIGNED_UINT32        /* deprecated */
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32 */
  /* Declared via __PACKED_STRUCT for consistency with the T_UINT16_xxx /
     T_UINT32_xxx helper structs below (previously used a bare
     __attribute__((packed)), which omitted the aligned(1) qualifier). */
  __PACKED_STRUCT T_UINT32 { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma clang diagnostic push
  #pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_READ */
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma clang diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                             __restrict
#endif
00113 
00114 
00115 /* ###########################  Core Function Access  ########################### */
00116 /** \ingroup  CMSIS_Core_FunctionInterface
00117     \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
00118   @{
00119  */
00120 
00121 /**
00122   \brief   Enable IRQ Interrupts
00123   \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
00124            Can only be executed in Privileged modes.
00125  */
00126 /* intrinsic void __enable_irq();  see arm_compat.h */
00127 
00128 
00129 /**
00130   \brief   Disable IRQ Interrupts
00131   \details Disables IRQ interrupts by setting the I-bit in the CPSR.
00132            Can only be executed in Privileged modes.
00133  */
00134 /* intrinsic void __disable_irq();  see arm_compat.h */
00135 
00136 
00137 /**
00138   \brief   Get Control Register
00139   \details Returns the content of the Control Register.
00140   \return               Control Register value
00141  */
00142 __STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
00143 {
00144   uint32_t result;
00145 
00146   __ASM volatile ("MRS %0, control" : "=r" (result) );
00147   return(result);
00148 }
00149 
00150 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Control Register (non-secure)
  \details Reads the non-secure CONTROL register (control_ns) via MRS;
           only accessible from secure state.
  \return               non-secure Control Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t ctrl_ns;

  __ASM volatile ("MRS %0, control_ns" : "=r" (ctrl_ns));
  return ctrl_ns;
}
#endif
00165 
00166 
00167 /**
00168   \brief   Set Control Register
00169   \details Writes the given value to the Control Register.
00170   \param [in]    control  Control Register value to set
00171  */
00172 __STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
00173 {
  /* MSR write; the "memory" clobber makes the asm a compiler barrier so
     memory accesses are not reordered across the CONTROL change. */
00174   __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
00175 }
00176 
00177 
00178 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00179 /**
00180   \brief   Set Control Register (non-secure)
00181   \details Writes the given value to the non-secure Control Register when in secure state.
00182   \param [in]    control  Control Register value to set
00183  */
00184 __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
00185 {
  /* control_ns is only accessible from secure state (guarded by CMSE == 3). */
00186   __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
00187 }
00188 #endif
00189 
00190 
00191 /**
00192   \brief   Get IPSR Register
00193   \details Returns the content of the IPSR Register.
00194   \return               IPSR Register value
00195  */
00196 __STATIC_FORCEINLINE uint32_t __get_IPSR(void)
00197 {
00198   uint32_t result;
00199 
00200   __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
00201   return(result);
00202 }
00203 
00204 
00205 /**
00206   \brief   Get APSR Register
00207   \details Returns the content of the APSR Register.
00208   \return               APSR Register value
00209  */
00210 __STATIC_FORCEINLINE uint32_t __get_APSR(void)
00211 {
00212   uint32_t result;
00213 
00214   __ASM volatile ("MRS %0, apsr" : "=r" (result) );
00215   return(result);
00216 }
00217 
00218 
00219 /**
00220   \brief   Get xPSR Register
00221   \details Returns the content of the xPSR Register.
00222   \return               xPSR Register value
00223  */
00224 __STATIC_FORCEINLINE uint32_t __get_xPSR(void)
00225 {
00226   uint32_t result;
00227 
00228   __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
00229   return(result);
00230 }
00231 
00232 
00233 /**
00234   \brief   Get Process Stack Pointer
00235   \details Returns the current value of the Process Stack Pointer (PSP).
00236   \return               PSP Register value
00237  */
00238 __STATIC_FORCEINLINE uint32_t __get_PSP(void)
00239 {
00240   register uint32_t result;
00241 
00242   __ASM volatile ("MRS %0, psp"  : "=r" (result) );
00243   return(result);
00244 }
00245 
00246 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer (non-secure)
  \details Reads the non-secure Process Stack Pointer (psp_ns) via MRS;
           only accessible from secure state.
  \return               PSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t psp_ns_val;

  __ASM volatile ("MRS %0, psp_ns" : "=r" (psp_ns_val));
  return psp_ns_val;
}
#endif
00261 
00262 
00263 /**
00264   \brief   Set Process Stack Pointer
00265   \details Assigns the given value to the Process Stack Pointer (PSP).
00266   \param [in]    topOfProcStack  Process Stack Pointer value to set
00267  */
00268 __STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
00269 {
  /* NOTE(review): unlike __set_CONTROL/__set_PRIMASK this has an empty clobber
     list (no "memory" barrier) — presumably intentional; confirm upstream. */
00270   __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
00271 }
00272 
00273 
00274 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00275 /**
00276   \brief   Set Process Stack Pointer (non-secure)
00277   \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
00278   \param [in]    topOfProcStack  Process Stack Pointer value to set
00279  */
00280 __STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
00281 {
  /* psp_ns is only accessible from secure state (guarded by CMSE == 3). */
00282   __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
00283 }
00284 #endif
00285 
00286 
00287 /**
00288   \brief   Get Main Stack Pointer
00289   \details Returns the current value of the Main Stack Pointer (MSP).
00290   \return               MSP Register value
00291  */
00292 __STATIC_FORCEINLINE uint32_t __get_MSP(void)
00293 {
00294   register uint32_t result;
00295 
00296   __ASM volatile ("MRS %0, msp" : "=r" (result) );
00297   return(result);
00298 }
00299 
00300 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Main Stack Pointer (non-secure)
  \details Reads the non-secure Main Stack Pointer (msp_ns) via MRS;
           only accessible from secure state.
  \return               MSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t msp_ns_val;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (msp_ns_val));
  return msp_ns_val;
}
#endif
00315 
00316 
00317 /**
00318   \brief   Set Main Stack Pointer
00319   \details Assigns the given value to the Main Stack Pointer (MSP).
00320   \param [in]    topOfMainStack  Main Stack Pointer value to set
00321  */
00322 __STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
00323 {
  /* Empty clobber list, matching __set_PSP (no compiler memory barrier). */
00324   __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
00325 }
00326 
00327 
00328 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00329 /**
00330   \brief   Set Main Stack Pointer (non-secure)
00331   \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
00332   \param [in]    topOfMainStack  Main Stack Pointer value to set
00333  */
00334 __STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
00335 {
  /* msp_ns is only accessible from secure state (guarded by CMSE == 3). */
00336   __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
00337 }
00338 #endif
00339 
00340 
00341 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00342 /**
00343   \brief   Get Stack Pointer (non-secure)
00344   \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
00345   \return               SP Register value
00346  */
00347 __STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
00348 {
00349   register uint32_t result;
00350 
00351   __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
00352   return(result);
00353 }
00354 
00355 
00356 /**
00357   \brief   Set Stack Pointer (non-secure)
00358   \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
00359   \param [in]    topOfStack  Stack Pointer value to set
00360  */
00361 __STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
00362 {
  /* sp_ns is only accessible from secure state (guarded by CMSE == 3). */
00363   __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
00364 }
00365 #endif
00366 
00367 
00368 /**
00369   \brief   Get Priority Mask
00370   \details Returns the current state of the priority mask bit from the Priority Mask Register.
00371   \return               Priority Mask value
00372  */
00373 __STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
00374 {
00375   uint32_t result;
00376 
00377   __ASM volatile ("MRS %0, primask" : "=r" (result) );
00378   return(result);
00379 }
00380 
00381 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Priority Mask (non-secure)
  \details Reads the non-secure PRIMASK register (primask_ns) via MRS;
           only accessible from secure state.
  \return               Priority Mask value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t pri_mask_ns;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (pri_mask_ns));
  return pri_mask_ns;
}
#endif
00396 
00397 
00398 /**
00399   \brief   Set Priority Mask
00400   \details Assigns the given value to the Priority Mask Register.
00401   \param [in]    priMask  Priority Mask
00402  */
00403 __STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
00404 {
  /* "memory" clobber: acts as a compiler barrier around the interrupt-masking change. */
00405   __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
00406 }
00407 
00408 
00409 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00410 /**
00411   \brief   Set Priority Mask (non-secure)
00412   \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
00413   \param [in]    priMask  Priority Mask
00414  */
00415 __STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
00416 {
  /* primask_ns is only accessible from secure state (guarded by CMSE == 3). */
00417   __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
00418 }
00419 #endif
00420 
00421 
00422 #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
00423      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
00424      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
/* FIQ/BASEPRI/FAULTMASK functions below exist only on ARMv7-M, ARMv7E-M and
   ARMv8-M Mainline; the matching #endif is further down the file. */
00425 /**
00426   \brief   Enable FIQ
00427   \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
00428            Can only be executed in Privileged modes.
00429  */
00430 #define __enable_fault_irq                __enable_fiq   /* see arm_compat.h */
00431 
00432 
00433 /**
00434   \brief   Disable FIQ
00435   \details Disables FIQ interrupts by setting the F-bit in the CPSR.
00436            Can only be executed in Privileged modes.
00437  */
00438 #define __disable_fault_irq               __disable_fiq   /* see arm_compat.h */
00439 
00440 
00441 /**
00442   \brief   Get Base Priority
00443   \details Returns the current value of the Base Priority register.
00444   \return               Base Priority register value
00445  */
00446 __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
00447 {
00448   uint32_t result;
00449 
00450   __ASM volatile ("MRS %0, basepri" : "=r" (result) );
00451   return(result);
00452 }
00453 
00454 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Base Priority (non-secure)
  \details Reads the non-secure BASEPRI register (basepri_ns) via MRS;
           only accessible from secure state.
  \return               Base Priority register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t base_pri_ns;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (base_pri_ns));
  return base_pri_ns;
}
#endif
00469 
00470 
00471 /**
00472   \brief   Set Base Priority
00473   \details Assigns the given value to the Base Priority register.
00474   \param [in]    basePri  Base Priority value to set
00475  */
00476 __STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
00477 {
  /* Unconditional BASEPRI write; see __set_BASEPRI_MAX for the conditional form. */
00478   __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
00479 }
00480 
00481 
00482 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00483 /**
00484   \brief   Set Base Priority (non-secure)
00485   \details Assigns the given value to the non-secure Base Priority register when in secure state.
00486   \param [in]    basePri  Base Priority value to set
00487  */
00488 __STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
00489 {
  /* basepri_ns is only accessible from secure state (guarded by CMSE == 3). */
00490   __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
00491 }
00492 #endif
00493 
00494 
00495 /**
00496   \brief   Set Base Priority with condition
00497   \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
00498            or the new value increases the BASEPRI priority level.
00499   \param [in]    basePri  Base Priority value to set
00500  */
00501 __STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
00502 {
  /* basepri_max: the hardware applies the write conditionally (only if it
     raises the masking priority), unlike the plain basepri target. */
00503   __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
00504 }
00505 
00506 
00507 /**
00508   \brief   Get Fault Mask
00509   \details Returns the current value of the Fault Mask register.
00510   \return               Fault Mask register value
00511  */
00512 __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
00513 {
00514   uint32_t result;
00515 
00516   __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
00517   return(result);
00518 }
00519 
00520 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Fault Mask (non-secure)
  \details Reads the non-secure FAULTMASK register (faultmask_ns) via MRS;
           only accessible from secure state.
  \return               Fault Mask register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t fault_mask_ns;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (fault_mask_ns));
  return fault_mask_ns;
}
#endif
00535 
00536 
00537 /**
00538   \brief   Set Fault Mask
00539   \details Assigns the given value to the Fault Mask register.
00540   \param [in]    faultMask  Fault Mask value to set
00541  */
00542 __STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
00543 {
  /* "memory" clobber: compiler barrier around the fault-masking change. */
00544   __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
00545 }
00546 
00547 
00548 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00549 /**
00550   \brief   Set Fault Mask (non-secure)
00551   \details Assigns the given value to the non-secure Fault Mask register when in secure state.
00552   \param [in]    faultMask  Fault Mask value to set
00553  */
00554 __STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
00555 {
  /* faultmask_ns is only accessible from secure state (guarded by CMSE == 3). */
00556   __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
00557 }
00558 #endif
00559 
00560 #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
00561            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
00562            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
00563 
00564 
00565 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
00566      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
00567 
00568 /**
00569   \brief   Get Process Stack Pointer Limit
00570   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00571   Stack Pointer Limit register hence zero is returned always in non-secure
00572   mode.
00573   
00574   \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
00575   \return               PSPLIM Register value
00576  */
00577 __STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
00578 {
00579 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
00580     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
00581     // without main extensions, the non-secure PSPLIM is RAZ/WI
00582   return 0U;
00583 #else
00584   register uint32_t result;
00585   __ASM volatile ("MRS %0, psplim"  : "=r" (result) );
00586   return result;
00587 #endif
00588 }
00589 
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer Limit (non-secure)
  \details Returns the current value of the non-secure Process Stack Pointer
           Limit (PSPLIM) when in secure state. Devices without the ARMv8-M
           Main Extension (i.e. Cortex-M23) lack the non-secure Stack Pointer
           Limit register, hence zero is always returned.
  \return               PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  /* Without the Main Extension the non-secure PSPLIM is RAZ/WI. */
  return 0U;
#else
  uint32_t limit_ns;
  __ASM volatile ("MRS %0, psplim_ns" : "=r" (limit_ns));
  return limit_ns;
#endif
}
#endif
00612 
00613 
00614 /**
00615   \brief   Set Process Stack Pointer Limit
00616   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00617   Stack Pointer Limit register hence the write is silently ignored in non-secure
00618   mode.
00619   
00620   \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
00621   \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
00622  */
00623 __STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
00624 {
00625 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
00626     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
00627   // without main extensions, the non-secure PSPLIM is RAZ/WI
  /* Parameter consumed to avoid an unused-parameter warning on the stub path. */
00628   (void)ProcStackPtrLimit;
00629 #else
00630   __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
00631 #endif
00632 }
00633 
00634 
#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
/**
  \brief   Set Process Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored in non-secure
  mode.

  \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  /* Stray "\n" removed from the asm template for consistency with the other
     MSR wrappers in this file; \brief corrected to name the Limit register. */
  __ASM volatile ("MSR psplim_ns, %0" : : "r" (ProcStackPtrLimit));
#endif
}
#endif
00655 
00656 
00657 /**
00658   \brief   Get Main Stack Pointer Limit
00659   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00660   Stack Pointer Limit register hence zero is returned always.
00661 
00662   \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
00663   \return               MSPLIM Register value
00664  */
00665 __STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
00666 {
00667 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
00668     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
00669   // without main extensions, the non-secure MSPLIM is RAZ/WI
00670   return 0U;
00671 #else
00672   register uint32_t result;
00673   __ASM volatile ("MRS %0, msplim" : "=r" (result) );
00674   return result;
00675 #endif
00676 }
00677 
00678 
#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
/**
  \brief   Get Main Stack Pointer Limit (non-secure)
  \details Returns the current value of the non-secure Main Stack Pointer Limit
           (MSPLIM) when in secure state. Devices without the ARMv8-M Main
           Extension (i.e. Cortex-M23) lack the non-secure Stack Pointer Limit
           register, hence zero is always returned.
  \return               MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  /* Without the Main Extension the non-secure MSPLIM is RAZ/WI. */
  return 0U;
#else
  uint32_t limit_ns;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (limit_ns));
  return limit_ns;
#endif
}
#endif
00700 
00701 
00702 /**
00703   \brief   Set Main Stack Pointer Limit
00704   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00705   Stack Pointer Limit register hence the write is silently ignored.
00706 
00707   \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
00708   \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
00709  */
00710 __STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
00711 {
00712 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
00713     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
00714   // without main extensions, the non-secure MSPLIM is RAZ/WI
  /* Parameter consumed to avoid an unused-parameter warning on the stub path. */
00715   (void)MainStackPtrLimit;
00716 #else
00717   __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
00718 #endif
00719 }
00720 
00721 
00722 #if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
00723 /**
00724   \brief   Set Main Stack Pointer Limit (non-secure)
00725   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00726   Stack Pointer Limit register hence the write is silently ignored.
00727 
00728   \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
00729   \param [in]    MainStackPtrLimit  Main Stack Pointer value to set
00730  */
00731 __STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
00732 {
00733 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
00734   // without main extensions, the non-secure MSPLIM is RAZ/WI
  /* Parameter consumed to avoid an unused-parameter warning on the stub path. */
00735   (void)MainStackPtrLimit;
00736 #else
00737   __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
00738 #endif
00739 }
00740 #endif
00741 
00742 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
00743            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
00744 
00745 
#if ((defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )

/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
           Reads as zero when no FPU is present/used.
  \return               Floating Point Status/Control register value
 */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
/* Function-like and fully parenthesized in BOTH branches so __get_FPSCR()
   is spelled identically and expression-safe with or without an FPU
   (previously the FPU branch was an object-like macro). */
#define __get_FPSCR()      ((uint32_t)__builtin_arm_get_fpscr())
#else
#define __get_FPSCR()      ((uint32_t)0U)
#endif

/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
           A no-op (argument evaluated and discarded) when no FPU is present/used.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#define __set_FPSCR(x)      __builtin_arm_set_fpscr(x)
#else
#define __set_FPSCR(x)      ((void)(x))
#endif

#endif /* ((defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
00775 
00776 
00777 
00778 /*@} end of CMSIS_Core_RegAccFunctions */
00779 
00780 
00781 /* ##########################  Core Instruction Access  ######################### */
00782 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
00783   Access to dedicated instructions
00784   @{
00785 */
00786 
00787 /* Define macros for porting to both thumb1 and thumb2.
00788  * For thumb1, use low register (r0-r7), specified by constraint "l"
00789  * Otherwise, use general registers, specified by constraint "r" */
00790 #if defined (__thumb__) && !defined (__thumb2__)
00791 #define __CMSIS_GCC_OUT_REG(r) "=l" (r)
00792 #define __CMSIS_GCC_USE_REG(r) "l" (r)
00793 #else
00794 #define __CMSIS_GCC_OUT_REG(r) "=r" (r)
00795 #define __CMSIS_GCC_USE_REG(r) "r" (r)
00796 #endif
/* These expand to GCC/Clang inline-asm operand specifications (constraint +
   expression) for use inside __ASM statements. */
00797 
00798 /**
00799   \brief   No Operation
00800   \details No Operation does nothing. This instruction can be used for code alignment purposes.
00801  */
/* The four hints below map directly to Arm Compiler 6 / Clang builtins. */
00802 #define __NOP          __builtin_arm_nop
00803 
00804 /**
00805   \brief   Wait For Interrupt
00806   \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
00807  */
00808 #define __WFI          __builtin_arm_wfi
00809 
00810 
00811 /**
00812   \brief   Wait For Event
00813   \details Wait For Event is a hint instruction that permits the processor to enter
00814            a low-power state until one of a number of events occurs.
00815  */
00816 #define __WFE          __builtin_arm_wfe
00817 
00818 
00819 /**
00820   \brief   Send Event
00821   \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
00822  */
00823 #define __SEV          __builtin_arm_sev
00824 
00825 
/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
/* Trailing semicolons removed from these macro bodies: a semicolon inside a
   function-like macro turns `if (c) __ISB(); else ...` into a syntax error
   and produces empty statements after normal `__ISB();` usage. */
#define __ISB()        __builtin_arm_isb(0xF)

/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB()        __builtin_arm_dsb(0xF)


/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
#define __DMB()        __builtin_arm_dmb(0xF)
00848 
00849 
/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV(value)   __builtin_bswap32(value)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV16(value) __ROR(__REV(value), 16)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
/* Expansion wrapped in parentheses so the int16_t cast cannot bind to
   surrounding operators when the macro is used inside a larger expression. */
#define __REVSH(value) ((int16_t)__builtin_bswap16(value))
00875 
00876 
00877 /**
00878   \brief   Rotate Right in unsigned value (32 bit)
00879   \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
00880   \param [in]    op1  Value to rotate
00881   \param [in]    op2  Number of Bits to rotate
00882   \return               Rotated value
00883  */
00884 __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
00885 {
00886   op2 %= 32U;
00887   if (op2 == 0U)
00888   {
00889     return op1;
00890   }
00891   return (op1 >> op2) | (op1 << (32U - op2));
00892 }
00893 
00894 
/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
/* 'value' is pasted into the instruction text via stringization (#value),
   so it must be a literal constant, not a run-time expression. */
#define __BKPT(value)     __ASM volatile ("bkpt "#value)
00903 
00904 
/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
/* Object-like macro: __RBIT(v) expands to __builtin_arm_rbit(v). */
#define __RBIT            __builtin_arm_rbit
00912 
/**
  \brief   Count leading zeros
  \details Counts the number of leading zeros of a data value.
           A zero input yields 32, matching the Arm CLZ instruction.
           __builtin_clz(0) on its own is undefined behaviour, so the zero
           case is handled explicitly; the statement expression evaluates
           the argument exactly once.
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
 */
#define __CLZ(value)                                                          \
  __extension__({                                                             \
    uint32_t __clz_in = (uint32_t)(value);  /* evaluate argument once */      \
    (uint8_t)((__clz_in == 0U) ? 32U : (uint32_t)__builtin_clz(__clz_in));    \
  })
00920 
00921 
00922 #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
00923      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
00924      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
00925      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/* Exclusive-access macros: each is an object-like alias for an armclang
   exclusive load/store builtin.  The leading cast binds to the value the
   builtin returns at the expansion site, e.g.
   __LDREXB(p) -> (uint8_t)__builtin_arm_ldrex(p). */

/**
  \brief   LDR Exclusive (8 bit)
  \details Executes a exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define __LDREXB        (uint8_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes a exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#define __LDREXH        (uint16_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes a exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#define __LDREXW        (uint32_t)__builtin_arm_ldrex


/**
  \brief   STR Exclusive (8 bit)
  \details Executes a exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXB        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (16 bit)
  \details Executes a exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXH        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (32 bit)
  \details Executes a exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXW        (uint32_t)__builtin_arm_strex


/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX             __builtin_arm_clrex
00991 
00992 #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
00993            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
00994            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
00995            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
00996 
00997 
00998 #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
00999      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
01000      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
01001 
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
/* NOTE(review): the builtin encodes 'sat' as an instruction immediate, so it
   presumably must be a compile-time constant — confirm against armclang docs. */
#define __SSAT             __builtin_arm_ssat


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT             __builtin_arm_usat
01020 
01021 
/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  /* __CMSIS_GCC_OUT_REG / __CMSIS_GCC_USE_REG are operand-constraint helper
     macros defined elsewhere in this header. */
  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}
01036 
01037 
/* Unprivileged (xxxT) load/store wrappers: loads go through a 32-bit
   temporary and are truncated to the destination width on return; stores
   widen the value to 32 bits for the register operand. */

/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes a Unprivileged LDRT instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes a Unprivileged LDRT instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes a Unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes a Unprivileged STRT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes a Unprivileged STRT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes a Unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
01117 
01118 #else  /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
01119            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
01120            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
01121 
01122 /**
01123   \brief   Signed Saturate
01124   \details Saturates a signed value.
01125   \param [in]  value  Value to be saturated
01126   \param [in]    sat  Bit position to saturate to (1..32)
01127   \return             Saturated value
01128  */
01129 __STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
01130 {
01131   if ((sat >= 1U) && (sat <= 32U))
01132   {
01133     const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
01134     const int32_t min = -1 - max ;
01135     if (val > max)
01136     {
01137       return max;
01138     }
01139     else if (val < min)
01140     {
01141       return min;
01142     }
01143   }
01144   return val;
01145 }
01146 
01147 /**
01148   \brief   Unsigned Saturate
01149   \details Saturates an unsigned value.
01150   \param [in]  value  Value to be saturated
01151   \param [in]    sat  Bit position to saturate to (0..31)
01152   \return             Saturated value
01153  */
01154 __STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
01155 {
01156   if (sat <= 31U)
01157   {
01158     const uint32_t max = ((1U << sat) - 1U);
01159     if (val > (int32_t)max)
01160     {
01161       return max;
01162     }
01163     else if (val < 0)
01164     {
01165       return 0U;
01166     }
01167   }
01168   return (uint32_t)val;
01169 }
01170 
01171 #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
01172            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
01173            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
01174 
01175 
01176 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
01177      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/* Armv8-M load-acquire / store-release wrappers and their exclusive
   (LDAEX/STLEX) macro aliases. */

/**
  \brief   Load-Acquire (8 bit)
  \details Executes a LDAB instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}


/**
  \brief   Load-Acquire (16 bit)
  \details Executes a LDAH instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}


/**
  \brief   Load-Acquire (32 bit)
  \details Executes a LDA instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/**
  \brief   Store-Release (8 bit)
  \details Executes a STLB instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Store-Release (16 bit)
  \details Executes a STLH instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Store-Release (32 bit)
  \details Executes a STL instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/* Object-like macro aliases: the leading cast binds to the builtin's return
   value at the expansion site (same pattern as __LDREXB above). */

/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes a LDAB exclusive instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define     __LDAEXB                 (uint8_t)__builtin_arm_ldaex


/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes a LDAH exclusive instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#define     __LDAEXH                 (uint16_t)__builtin_arm_ldaex


/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes a LDA exclusive instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#define     __LDAEX                  (uint32_t)__builtin_arm_ldaex


/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes a STLB exclusive instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEXB                 (uint32_t)__builtin_arm_stlex


/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes a STLH exclusive instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEXH                 (uint32_t)__builtin_arm_stlex


/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes a STL exclusive instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEX                  (uint32_t)__builtin_arm_stlex
01317 
01318 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
01319            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
01320 
01321 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
01322 
01323 
01324 /* ###################  Compiler specific Intrinsics  ########################### */
01325 /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
01326   Access to dedicated SIMD instructions
01327   @{
01328 */
01329 
01330 #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
01331 
/* 8-bit SIMD add/subtract: each wrapper emits one Arm DSP instruction that
   operates on four packed 8-bit lanes in op1/op2 and returns four packed
   lane results.  Prefix key: S/U = signed/unsigned, Q/UQ = saturating,
   SH/UH = halving. */

/** \brief Four-lane 8-bit signed addition (SADD8). */
__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Four-lane 8-bit saturating signed addition (QADD8). */
__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Four-lane 8-bit signed halving addition (SHADD8). */
__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Four-lane 8-bit unsigned addition (UADD8). */
__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Four-lane 8-bit saturating unsigned addition (UQADD8). */
__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Four-lane 8-bit unsigned halving addition (UHADD8). */
__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


/** \brief Four-lane 8-bit signed subtraction (SSUB8). */
__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Four-lane 8-bit saturating signed subtraction (QSUB8). */
__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Four-lane 8-bit signed halving subtraction (SHSUB8). */
__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Four-lane 8-bit unsigned subtraction (USUB8). */
__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Four-lane 8-bit saturating unsigned subtraction (UQSUB8). */
__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Four-lane 8-bit unsigned halving subtraction (UHSUB8). */
__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
01428 
01429 
/* 16-bit SIMD add/subtract: same family as the 8-bit group above, but the
   instructions operate on two packed 16-bit halfword lanes. */

/** \brief Two-lane 16-bit signed addition (SADD16). */
__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Two-lane 16-bit saturating signed addition (QADD16). */
__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Two-lane 16-bit signed halving addition (SHADD16). */
__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Two-lane 16-bit unsigned addition (UADD16). */
__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Two-lane 16-bit saturating unsigned addition (UQADD16). */
__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Two-lane 16-bit unsigned halving addition (UHADD16). */
__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Two-lane 16-bit signed subtraction (SSUB16). */
__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Two-lane 16-bit saturating signed subtraction (QSUB16). */
__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Two-lane 16-bit signed halving subtraction (SHSUB16). */
__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Two-lane 16-bit unsigned subtraction (USUB16). */
__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Two-lane 16-bit saturating unsigned subtraction (UQSUB16). */
__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Two-lane 16-bit unsigned halving subtraction (UHSUB16). */
__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
01525 
/* Halfword exchange add/subtract (ASX) and subtract/add (SAX) family:
   each instruction exchanges the halfwords of op2 before combining with
   op1, so one lane is added while the other is subtracted. */

/** \brief Signed add/subtract with halfword exchange (SASX). */
__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Saturating signed add/subtract with exchange (QASX). */
__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Signed halving add/subtract with exchange (SHASX). */
__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Unsigned add/subtract with exchange (UASX). */
__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Saturating unsigned add/subtract with exchange (UQASX). */
__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Unsigned halving add/subtract with exchange (UHASX). */
__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Signed subtract/add with halfword exchange (SSAX). */
__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Saturating signed subtract/add with exchange (QSAX). */
__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Signed halving subtract/add with exchange (SHSAX). */
__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Unsigned subtract/add with exchange (USAX). */
__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Saturating unsigned subtract/add with exchange (UQSAX). */
__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Unsigned halving subtract/add with exchange (UHSAX). */
__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
01621 
/** \brief Sum of absolute differences of the four byte lanes (USAD8). */
__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Sum of absolute byte differences, accumulated into op3 (USADA8). */
__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/* Dual-halfword saturate macros.  ARG2 uses the "I" (immediate) constraint,
   so it must be a compile-time constant expression. */
#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
01651 
/** \brief Zero-extend bytes 0 and 2 of op1 into two halfwords (UXTB16). */
__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

/** \brief Zero-extend bytes of op2 and add to the halfwords of op1 (UXTAB16). */
__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Sign-extend bytes 0 and 2 of op1 into two halfwords (SXTB16). */
__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

/** \brief Sign-extend bytes of op2 and add to the halfwords of op1 (SXTAB16). */
__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
01683 
/* Dual 16x16 signed multiplies: the 'X' variants exchange the halfwords of
   op2 before multiplying; SMLAD/SMLADX additionally accumulate into op3. */

/** \brief Dual signed multiply, products added (SMUAD). */
__STATIC_FORCEINLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Dual signed multiply with exchanged halfwords, products added (SMUADX). */
__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Dual signed multiply-accumulate (SMLAD). */
__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/** \brief Dual signed multiply-accumulate with exchanged halfwords (SMLADX). */
__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
01715 
/* 64-bit accumulating dual multiplies: the 64-bit accumulator is split into
   two 32-bit register halves through a union; the order of the halves in
   the asm operand list is swapped between little- and big-endian builds. */

/** \brief Dual signed multiply-accumulate into a 64-bit accumulator (SMLALD). */
__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* w32[0]/w32[1] alias the two words of w64 (low word first on
     little-endian targets, reversed on big-endian). */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

/** \brief Dual signed multiply-accumulate (64-bit) with exchanged halfwords (SMLALDX). */
__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
01749 
/** \brief Dual signed multiply, products subtracted (SMUSD). */
__STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Dual signed multiply-subtract with exchanged halfwords (SMUSDX). */
__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Dual signed multiply-subtract, accumulated into op3 (SMLSD). */
__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/** \brief Dual signed multiply-subtract with exchange, accumulated into op3 (SMLSDX). */
__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
01781 
/** \brief Dual signed multiply-subtract into a 64-bit accumulator (SMLSLD).
    Uses the same union/endianness register-pair technique as __SMLALD. */
__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

/** \brief Dual signed multiply-subtract (64-bit) with exchanged halfwords (SMLSLDX). */
__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
01815 
/**
  \brief   Select bytes (SEL).
  \details Emits the SEL instruction: each byte of the result is taken from
           op1 or op2 according to the APSR.GE flags set by a preceding
           SIMD instruction (per the Arm ISA).
  \param [in]  op1  value selected where the corresponding GE flag is set
  \param [in]  op2  value selected where the corresponding GE flag is clear
  \return     byte-wise selection of op1/op2
 */
__STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
01823 
/**
  \brief   Saturating 32-bit add (QADD).
  \details Emits the QADD instruction: signed addition of op1 and op2,
           saturated to the 32-bit signed range (per the Arm ISA the Q flag
           is set on saturation).
  \param [in]  op1  first summand
  \param [in]  op2  second summand
  \return     saturated sum op1 + op2
 */
__STATIC_FORCEINLINE  int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
01831 
/**
  \brief   Saturating 32-bit subtract (QSUB).
  \details Emits the QSUB instruction: signed subtraction op1 - op2,
           saturated to the 32-bit signed range (per the Arm ISA the Q flag
           is set on saturation).
  \param [in]  op1  minuend
  \param [in]  op2  subtrahend
  \return     saturated difference op1 - op2
 */
__STATIC_FORCEINLINE  int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
01839 
#if 0 /* Inline-asm variants of __PKHBT/__PKHTB, kept for reference but
         disabled — presumably in favor of the pure-C macro definitions
         that follow; note the "I" constraint requires ARG3 to be an
         assemble-time immediate. NOTE(review): confirm intent upstream. */
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
#endif
01858 
01859 #define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
01860                                            ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )
01861 
01862 #define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
01863                                            ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )
01864 
/**
  \brief   32-bit signed multiply, most-significant-word accumulate (SMMLA).
  \details Emits the SMMLA instruction: per the Arm ISA,
           result = op3 + ((int64_t)op1 * op2)[63:32], i.e. the accumulator
           plus the top 32 bits of the full 64-bit signed product.
  \param [in]  op1  first multiplicand
  \param [in]  op2  second multiplicand
  \param [in]  op3  32-bit accumulator value
  \return     accumulated most-significant word of the product
 */
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
  return(result);
}
01872 
01873 #endif /* (__ARM_FEATURE_DSP == 1) */
01874 /*@} end of group CMSIS_SIMD_intrinsics */
01875 
01876 
01877 #endif /* __CMSIS_ARMCLANG_H */
01878