Robert Lopez / CMSIS5
Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers cmsis_gcc.h Source File

cmsis_gcc.h

Go to the documentation of this file.
00001 /**************************************************************************//**
00002  * @file     cmsis_gcc.h
00003  * @brief    CMSIS compiler GCC header file
00004  * @version  V5.0.3
00005  * @date     16. January 2018
00006  ******************************************************************************/
00007 /*
00008  * Copyright (c) 2009-2017 ARM Limited. All rights reserved.
00009  *
00010  * SPDX-License-Identifier: Apache-2.0
00011  *
00012  * Licensed under the Apache License, Version 2.0 (the License); you may
00013  * not use this file except in compliance with the License.
00014  * You may obtain a copy of the License at
00015  *
00016  * www.apache.org/licenses/LICENSE-2.0
00017  *
00018  * Unless required by applicable law or agreed to in writing, software
00019  * distributed under the License is distributed on an AS IS BASIS, WITHOUT
00020  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00021  * See the License for the specific language governing permissions and
00022  * limitations under the License.
00023  */
00024 
00025 #ifndef __CMSIS_GCC_H
00026 #define __CMSIS_GCC_H
00027 
00028 /* ignore some GCC warnings */
00029 #pragma GCC diagnostic push
00030 #pragma GCC diagnostic ignored "-Wsign-conversion"
00031 #pragma GCC diagnostic ignored "-Wconversion"
00032 #pragma GCC diagnostic ignored "-Wunused-parameter"
00033 
00034 /* Fallback for __has_builtin */
00035 #ifndef __has_builtin
00036   #define __has_builtin(x) (0)  /* on compilers without __has_builtin, treat every builtin as unavailable */
00037 #endif
00038 
00039 /* CMSIS compiler specific defines */
00040 #ifndef   __ASM
00041   #define __ASM                                  __asm
00042 #endif
00043 #ifndef   __INLINE
00044   #define __INLINE                               inline
00045 #endif
00046 #ifndef   __STATIC_INLINE
00047   #define __STATIC_INLINE                        static inline
00048 #endif
00049 #ifndef   __STATIC_FORCEINLINE                 
00050   #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static inline  /* inlined even at -O0 */
00051 #endif                                           
00052 #ifndef   __NO_RETURN
00053   #define __NO_RETURN                            __attribute__((__noreturn__))
00054 #endif
00055 #ifndef   __USED
00056   #define __USED                                 __attribute__((used))  /* keep symbol even if apparently unreferenced */
00057 #endif
00058 #ifndef   __WEAK
00059   #define __WEAK                                 __attribute__((weak))
00060 #endif
00061 #ifndef   __PACKED
00062   #define __PACKED                               __attribute__((packed, aligned(1)))  /* aligned(1): no alignment may be assumed */
00063 #endif
00064 #ifndef   __PACKED_STRUCT
00065   #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
00066 #endif
00067 #ifndef   __PACKED_UNION
00068   #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
00069 #endif
00070 #ifndef   __UNALIGNED_UINT32        /* deprecated */
00071   #pragma GCC diagnostic push
00072   #pragma GCC diagnostic ignored "-Wpacked"
00073   #pragma GCC diagnostic ignored "-Wattributes"
00074   struct __attribute__((packed)) T_UINT32 { uint32_t v; };  /* packed wrapper makes the compiler emit unaligned-safe accesses; assumes <stdint.h> is pulled in by the including core header -- TODO confirm */
00075   #pragma GCC diagnostic pop
00076   #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
00077 #endif
00078 #ifndef   __UNALIGNED_UINT16_WRITE
00079   #pragma GCC diagnostic push
00080   #pragma GCC diagnostic ignored "-Wpacked"
00081   #pragma GCC diagnostic ignored "-Wattributes"
00082   __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
00083   #pragma GCC diagnostic pop
00084   #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
00085 #endif
00086 #ifndef   __UNALIGNED_UINT16_READ
00087   #pragma GCC diagnostic push
00088   #pragma GCC diagnostic ignored "-Wpacked"
00089   #pragma GCC diagnostic ignored "-Wattributes"
00090   __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
00091   #pragma GCC diagnostic pop
00092   #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
00093 #endif
00094 #ifndef   __UNALIGNED_UINT32_WRITE
00095   #pragma GCC diagnostic push
00096   #pragma GCC diagnostic ignored "-Wpacked"
00097   #pragma GCC diagnostic ignored "-Wattributes"
00098   __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
00099   #pragma GCC diagnostic pop
00100   #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
00101 #endif
00102 #ifndef   __UNALIGNED_UINT32_READ
00103   #pragma GCC diagnostic push
00104   #pragma GCC diagnostic ignored "-Wpacked"
00105   #pragma GCC diagnostic ignored "-Wattributes"
00106   __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
00107   #pragma GCC diagnostic pop
00108   #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
00109 #endif
00110 #ifndef   __ALIGNED
00111   #define __ALIGNED(x)                           __attribute__((aligned(x)))
00112 #endif
00113 #ifndef   __RESTRICT
00114   #define __RESTRICT                             __restrict
00115 #endif
00116 
00117 
00118 /* ###########################  Core Function Access  ########################### */
00119 /** \ingroup  CMSIS_Core_FunctionInterface
00120     \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
00121   @{
00122  */
00123 
00124 /**
00125   \brief   Enable IRQ Interrupts
00126   \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
00127            Can only be executed in Privileged modes.
00128  */
00129 __STATIC_FORCEINLINE void __enable_irq(void)
00130 {
00131   __ASM volatile ("cpsie i" : : : "memory");  /* "memory" clobber: compiler must not move memory accesses across the enable */
00132 }
00133 
00134 
00135 /**
00136   \brief   Disable IRQ Interrupts
00137   \details Disables IRQ interrupts by setting the I-bit in the CPSR.
00138            Can only be executed in Privileged modes.
00139  */
00140 __STATIC_FORCEINLINE void __disable_irq(void)
00141 {
00142   __ASM volatile ("cpsid i" : : : "memory");
00143 }
00144 
00145 
00146 /**
00147   \brief   Get Control Register
00148   \details Returns the content of the Control Register.
00149   \return               Control Register value
00150  */
00151 __STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
00152 {
00153   uint32_t result;
00154 
00155   __ASM volatile ("MRS %0, control" : "=r" (result) );
00156   return(result);
00157 }
00158 
00159 
00160 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))  /* CMSE == 3: compiling for the secure state */
00161 /**
00162   \brief   Get Control Register (non-secure)
00163   \details Returns the content of the non-secure Control Register when in secure mode.
00164   \return               non-secure Control Register value
00165  */
00166 __STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
00167 {
00168   uint32_t result;
00169 
00170   __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
00171   return(result);
00172 }
00173 #endif
00174 
00175 
00176 /**
00177   \brief   Set Control Register
00178   \details Writes the given value to the Control Register.
00179   \param [in]    control  Control Register value to set
00180  */
00181 __STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
00182 {
00183   __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
00184 }
00185 
00186 
00187 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00188 /**
00189   \brief   Set Control Register (non-secure)
00190   \details Writes the given value to the non-secure Control Register when in secure state.
00191   \param [in]    control  Control Register value to set
00192  */
00193 __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
00194 {
00195   __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
00196 }
00197 #endif
00198 
00199 
00200 /**
00201   \brief   Get IPSR Register
00202   \details Returns the content of the IPSR Register.
00203   \return               IPSR Register value
00204  */
00205 __STATIC_FORCEINLINE uint32_t __get_IPSR(void)
00206 {
00207   uint32_t result;
00208 
00209   __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
00210   return(result);
00211 }
00212 
00213 
00214 /**
00215   \brief   Get APSR Register
00216   \details Returns the content of the APSR Register.
00217   \return               APSR Register value
00218  */
00219 __STATIC_FORCEINLINE uint32_t __get_APSR(void)
00220 {
00221   uint32_t result;
00222 
00223   __ASM volatile ("MRS %0, apsr" : "=r" (result) );
00224   return(result);
00225 }
00226 
00227 
00228 /**
00229   \brief   Get xPSR Register
00230   \details Returns the content of the xPSR Register.
00231   \return               xPSR Register value
00232  */
00233 __STATIC_FORCEINLINE uint32_t __get_xPSR(void)
00234 {
00235   uint32_t result;
00236 
00237   __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
00238   return(result);
00239 }
00240 
00241 
00242 /**
00243   \brief   Get Process Stack Pointer
00244   \details Returns the current value of the Process Stack Pointer (PSP).
00245   \return               PSP Register value
00246  */
00247 __STATIC_FORCEINLINE uint32_t __get_PSP(void)
00248 {
00249   register uint32_t result;  /* 'register' is only a (legacy) hint to the compiler */
00250 
00251   __ASM volatile ("MRS %0, psp"  : "=r" (result) );
00252   return(result);
00253 }
00254 
00255 
00256 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00257 /**
00258   \brief   Get Process Stack Pointer (non-secure)
00259   \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
00260   \return               PSP Register value
00261  */
00262 __STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
00263 {
00264   register uint32_t result;
00265 
00266   __ASM volatile ("MRS %0, psp_ns"  : "=r" (result) );
00267   return(result);
00268 }
00269 #endif
00270 
00271 
00272 /**
00273   \brief   Set Process Stack Pointer
00274   \details Assigns the given value to the Process Stack Pointer (PSP).
00275   \param [in]    topOfProcStack  Process Stack Pointer value to set
00276  */
00277 __STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
00278 {
00279   __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );  /* NOTE(review): empty clobber list is deliberate in upstream CMSIS -- confirm before changing */
00280 }
00281 
00282 
00283 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00284 /**
00285   \brief   Set Process Stack Pointer (non-secure)
00286   \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
00287   \param [in]    topOfProcStack  Process Stack Pointer value to set
00288  */
00289 __STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
00290 {
00291   __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
00292 }
00293 #endif
00294 
00295 
00296 /**
00297   \brief   Get Main Stack Pointer
00298   \details Returns the current value of the Main Stack Pointer (MSP).
00299   \return               MSP Register value
00300  */
00301 __STATIC_FORCEINLINE uint32_t __get_MSP(void)
00302 {
00303   register uint32_t result;
00304 
00305   __ASM volatile ("MRS %0, msp" : "=r" (result) );
00306   return(result);
00307 }
00308 
00309 
00310 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00311 /**
00312   \brief   Get Main Stack Pointer (non-secure)
00313   \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
00314   \return               MSP Register value
00315  */
00316 __STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
00317 {
00318   register uint32_t result;
00319 
00320   __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
00321   return(result);
00322 }
00323 #endif
00324 
00325 
00326 /**
00327   \brief   Set Main Stack Pointer
00328   \details Assigns the given value to the Main Stack Pointer (MSP).
00329   \param [in]    topOfMainStack  Main Stack Pointer value to set
00330  */
00331 __STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
00332 {
00333   __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
00334 }
00335 
00336 
00337 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00338 /**
00339   \brief   Set Main Stack Pointer (non-secure)
00340   \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
00341   \param [in]    topOfMainStack  Main Stack Pointer value to set
00342  */
00343 __STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
00344 {
00345   __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
00346 }
00347 #endif
00348 
00349 
00350 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00351 /**
00352   \brief   Get Stack Pointer (non-secure)
00353   \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
00354   \return               SP Register value
00355  */
00356 __STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
00357 {
00358   register uint32_t result;
00359 
00360   __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
00361   return(result);
00362 }
00363 
00364 
00365 /**
00366   \brief   Set Stack Pointer (non-secure)
00367   \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
00368   \param [in]    topOfStack  Stack Pointer value to set
00369  */
00370 __STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
00371 {
00372   __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
00373 }
00374 #endif
00375 
00376 
00377 /**
00378   \brief   Get Priority Mask
00379   \details Returns the current state of the priority mask bit from the Priority Mask Register.
00380   \return               Priority Mask value
00381  */
00382 __STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
00383 {
00384   uint32_t result;
00385 
00386   __ASM volatile ("MRS %0, primask" : "=r" (result) :: "memory");  /* "memory" clobber keeps this read ordered with surrounding accesses */
00387   return(result);
00388 }
00389 
00390 
00391 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00392 /**
00393   \brief   Get Priority Mask (non-secure)
00394   \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
00395   \return               Priority Mask value
00396  */
00397 __STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
00398 {
00399   uint32_t result;
00400 
00401   __ASM volatile ("MRS %0, primask_ns" : "=r" (result) :: "memory");
00402   return(result);
00403 }
00404 #endif
00405 
00406 
00407 /**
00408   \brief   Set Priority Mask
00409   \details Assigns the given value to the Priority Mask Register.
00410   \param [in]    priMask  Priority Mask
00411  */
00412 __STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
00413 {
00414   __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
00415 }
00416 
00417 
00418 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00419 /**
00420   \brief   Set Priority Mask (non-secure)
00421   \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
00422   \param [in]    priMask  Priority Mask
00423  */
00424 __STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
00425 {
00426   __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
00427 }
00428 #endif
00429 
00430 
00431 #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
00432      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
00433      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )  /* FIQ/BASEPRI/FAULTMASK exist only on these Mainline profiles */
00434 /**
00435   \brief   Enable FIQ
00436   \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
00437            Can only be executed in Privileged modes.
00438  */
00439 __STATIC_FORCEINLINE void __enable_fault_irq(void)
00440 {
00441   __ASM volatile ("cpsie f" : : : "memory");
00442 }
00443 
00444 
00445 /**
00446   \brief   Disable FIQ
00447   \details Disables FIQ interrupts by setting the F-bit in the CPSR.
00448            Can only be executed in Privileged modes.
00449  */
00450 __STATIC_FORCEINLINE void __disable_fault_irq(void)
00451 {
00452   __ASM volatile ("cpsid f" : : : "memory");
00453 }
00454 
00455 
00456 /**
00457   \brief   Get Base Priority
00458   \details Returns the current value of the Base Priority register.
00459   \return               Base Priority register value
00460  */
00461 __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
00462 {
00463   uint32_t result;
00464 
00465   __ASM volatile ("MRS %0, basepri" : "=r" (result) );
00466   return(result);
00467 }
00468 
00469 
00470 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00471 /**
00472   \brief   Get Base Priority (non-secure)
00473   \details Returns the current value of the non-secure Base Priority register when in secure state.
00474   \return               Base Priority register value
00475  */
00476 __STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
00477 {
00478   uint32_t result;
00479 
00480   __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
00481   return(result);
00482 }
00483 #endif
00484 
00485 
00486 /**
00487   \brief   Set Base Priority
00488   \details Assigns the given value to the Base Priority register.
00489   \param [in]    basePri  Base Priority value to set
00490  */
00491 __STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
00492 {
00493   __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
00494 }
00495 
00496 
00497 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00498 /**
00499   \brief   Set Base Priority (non-secure)
00500   \details Assigns the given value to the non-secure Base Priority register when in secure state.
00501   \param [in]    basePri  Base Priority value to set
00502  */
00503 __STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
00504 {
00505   __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
00506 }
00507 #endif
00508 
00509 
00510 /**
00511   \brief   Set Base Priority with condition
00512   \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
00513            or the new value increases the BASEPRI priority level.
00514   \param [in]    basePri  Base Priority value to set
00515  */
00516 __STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
00517 {
00518   __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");  /* basepri_max: hardware performs the conditional write */
00519 }
00520 
00521 
00522 /**
00523   \brief   Get Fault Mask
00524   \details Returns the current value of the Fault Mask register.
00525   \return               Fault Mask register value
00526  */
00527 __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
00528 {
00529   uint32_t result;
00530 
00531   __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
00532   return(result);
00533 }
00534 
00535 
00536 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00537 /**
00538   \brief   Get Fault Mask (non-secure)
00539   \details Returns the current value of the non-secure Fault Mask register when in secure state.
00540   \return               Fault Mask register value
00541  */
00542 __STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
00543 {
00544   uint32_t result;
00545 
00546   __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
00547   return(result);
00548 }
00549 #endif
00550 
00551 
00552 /**
00553   \brief   Set Fault Mask
00554   \details Assigns the given value to the Fault Mask register.
00555   \param [in]    faultMask  Fault Mask value to set
00556  */
00557 __STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
00558 {
00559   __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
00560 }
00561 
00562 
00563 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00564 /**
00565   \brief   Set Fault Mask (non-secure)
00566   \details Assigns the given value to the non-secure Fault Mask register when in secure state.
00567   \param [in]    faultMask  Fault Mask value to set
00568  */
00569 __STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
00570 {
00571   __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
00572 }
00573 #endif
00574 
00575 #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
00576            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
00577            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
00578 
00579 
00580 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
00581      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )  /* stack-limit registers are Armv8-M only */
00582 
00583 /**
00584   \brief   Get Process Stack Pointer Limit
00585   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00586   Stack Pointer Limit register hence zero is returned always in non-secure
00587   mode.
00588   
00589   \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
00590   \return               PSPLIM Register value
00591  */
00592 __STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
00593 {
00594 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
00595     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
00596     // without main extensions, the non-secure PSPLIM is RAZ/WI
00597   return 0U;
00598 #else
00599   register uint32_t result;
00600   __ASM volatile ("MRS %0, psplim"  : "=r" (result) );
00601   return result;
00602 #endif
00603 }
00604 
00605 #if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
00606 /**
00607   \brief   Get Process Stack Pointer Limit (non-secure)
00608   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00609   Stack Pointer Limit register hence zero is returned always.
00610 
00611   \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
00612   \return               PSPLIM Register value
00613  */
00614 __STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
00615 {
00616 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
00617   // without main extensions, the non-secure PSPLIM is RAZ/WI (reads-as-zero, writes ignored)
00618   return 0U;
00619 #else
00620   register uint32_t result;
00621   __ASM volatile ("MRS %0, psplim_ns"  : "=r" (result) );
00622   return result;
00623 #endif
00624 }
00625 #endif
00626 
00627 
00628 /**
00629   \brief   Set Process Stack Pointer Limit
00630   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00631   Stack Pointer Limit register hence the write is silently ignored in non-secure
00632   mode.
00633   
00634   \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
00635   \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
00636  */
00637 __STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
00638 {
00639 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
00640     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
00641   // without main extensions, the non-secure PSPLIM is RAZ/WI
00642   (void)ProcStackPtrLimit;  // cast-to-void silences the unused-parameter warning
00643 #else
00644   __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
00645 #endif
00646 }
00647 
00648 
00649 #if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
00650 /**
00651   \brief   Set Process Stack Pointer (non-secure)
00652   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00653   Stack Pointer Limit register hence the write is silently ignored.
00654 
00655   \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
00656   \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
00657  */
00658 __STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
00659 {
00660 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
00661   // without main extensions, the non-secure PSPLIM is RAZ/WI
00662   (void)ProcStackPtrLimit;
00663 #else
00664   __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
00665 #endif
00666 }
00667 #endif
00668 
00669 
00670 /**
00671   \brief   Get Main Stack Pointer Limit
00672   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00673   Stack Pointer Limit register hence zero is returned always in non-secure
00674   mode.
00675 
00676   \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
00677   \return               MSPLIM Register value
00678  */
00679 __STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
00680 {
00681 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
00682     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
00683   // without main extensions, the non-secure MSPLIM is RAZ/WI
00684   return 0U;
00685 #else
00686   register uint32_t result;
00687   __ASM volatile ("MRS %0, msplim" : "=r" (result) );
00688   return result;
00689 #endif
00690 }
00691 
00692 
00693 #if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
00694 /**
00695   \brief   Get Main Stack Pointer Limit (non-secure)
00696   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00697   Stack Pointer Limit register hence zero is returned always.
00698 
00699   \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state.
00700   \return               MSPLIM Register value
00701  */
00702 __STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
00703 {
00704 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
00705   // without main extensions, the non-secure MSPLIM is RAZ/WI
00706   return 0U;
00707 #else
00708   register uint32_t result;
00709   __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
00710   return result;
00711 #endif
00712 }
00713 #endif
00714 
00715 
00716 /**
00717   \brief   Set Main Stack Pointer Limit
00718   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00719   Stack Pointer Limit register hence the write is silently ignored in non-secure
00720   mode.
00721 
00722   \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
00723   \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
00724  */
00725 __STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
00726 {
00727 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
00728     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
00729   // without main extensions, the non-secure MSPLIM is RAZ/WI
00730   (void)MainStackPtrLimit;
00731 #else
00732   __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
00733 #endif
00734 }
00735 
00736 
00737 #if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
00738 /**
00739   \brief   Set Main Stack Pointer Limit (non-secure)
00740   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00741   Stack Pointer Limit register hence the write is silently ignored.
00742 
00743   \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
00744   \param [in]    MainStackPtrLimit  Main Stack Pointer value to set
00745  */
00746 __STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
00747 {
00748 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
00749   // without main extensions, the non-secure MSPLIM is RAZ/WI
00750   (void)MainStackPtrLimit;
00751 #else
00752   __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
00753 #endif
00754 }
00755 #endif
00756 
00757 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
00758            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
00759 
00760 
00761 #if ((defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
00762      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )  /* FP register access only on profiles that can have an FPU */
00763 
00764 /**
00765   \brief   Get FPSCR
00766   \details Returns the current value of the Floating Point Status/Control register.
00767   \return               Floating Point Status/Control register value
00768  */
00769 __STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
00770 {
00771 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
00772      (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )  /* FPU present in hardware AND enabled for this build */
00773 #if __has_builtin(__builtin_arm_get_fpscr) || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
00774   /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
00775   return __builtin_arm_get_fpscr();
00776 #else
00777   uint32_t result;
00778 
00779   __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
00780   return(result);
00781 #endif
00782 #else
00783   return(0U);
00784 #endif
00785 }
00786 
00787 
00788 /**
00789   \brief   Set FPSCR
00790   \details Assigns the given value to the Floating Point Status/Control register.
00791   \param [in]    fpscr  Floating Point Status/Control value to set
00792  */
00793 __STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
00794 {
00795 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
00796      (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
00797 #if __has_builtin(__builtin_arm_set_fpscr) || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
00798   /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
00799   __builtin_arm_set_fpscr(fpscr);
00800 #else
00801   __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");  /* "vfpcc" clobber: FPSCR holds the FP condition flags */
00802 #endif
00803 #else
00804   (void)fpscr;  /* no FPU: write is discarded, parameter consumed to avoid a warning */
00805 #endif
00806 }
00807 
00808 #endif /* ((defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
00809            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
00810 
00811 
00812 
00813 /*@} end of CMSIS_Core_RegAccFunctions */
00814 
00815 
00816 /* ##########################  Core Instruction Access  ######################### */
00817 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
00818   Access to dedicated instructions
00819   @{
00820 */
00821 
00822 /* Define macros for porting to both thumb1 and thumb2.
00823  * For thumb1, use low register (r0-r7), specified by constraint "l"
00824  * Otherwise, use general registers, specified by constraint "r" */
00825 #if defined (__thumb__) && !defined (__thumb2__)  /* Thumb-1 only: many encodings cannot address high registers */
00826 #define __CMSIS_GCC_OUT_REG(r) "=l" (r)
00827 #define __CMSIS_GCC_RW_REG(r) "+l" (r)
00828 #define __CMSIS_GCC_USE_REG(r) "l" (r)
00829 #else
00830 #define __CMSIS_GCC_OUT_REG(r) "=r" (r)
00831 #define __CMSIS_GCC_RW_REG(r) "+r" (r)
00832 #define __CMSIS_GCC_USE_REG(r) "r" (r)
00833 #endif
00834 
00835 /**
00836   \brief   No Operation
00837   \details No Operation does nothing. This instruction can be used for code alignment purposes.
00838  */
00839 #define __NOP()                             __ASM volatile ("nop")  /* no "memory" clobber: a NOP has no memory effects */
00840 
00841 /**
00842   \brief   Wait For Interrupt
00843   \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
00844  */
00845 #define __WFI()                             __ASM volatile ("wfi")
00846 
00847 
00848 /**
00849   \brief   Wait For Event
00850   \details Wait For Event is a hint instruction that permits the processor to enter
00851            a low-power state until one of a number of events occurs.
00852  */
00853 #define __WFE()                             __ASM volatile ("wfe")
00854 
00855 
00856 /**
00857   \brief   Send Event
00858   \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
00859  */
00860 #define __SEV()                             __ASM volatile ("sev")
00861 
00862 
00863 /**
00864   \brief   Instruction Synchronization Barrier
00865   \details Instruction Synchronization Barrier flushes the pipeline in the processor,
00866            so that all instructions following the ISB are fetched from cache or memory,
00867            after the instruction has been completed.
00868  */
00869 __STATIC_FORCEINLINE void __ISB(void)
00870 {
00871   __ASM volatile ("isb 0xF":::"memory");  /* option 0xF = SY: full-system barrier */
00872 }
00873 
00874 
00875 /**
00876   \brief   Data Synchronization Barrier
00877   \details Acts as a special kind of Data Memory Barrier.
00878            It completes when all explicit memory accesses before this instruction complete.
00879  */
00880 __STATIC_FORCEINLINE void __DSB(void)
00881 {
00882   __ASM volatile ("dsb 0xF":::"memory");
00883 }
00884 
00885 
00886 /**
00887   \brief   Data Memory Barrier
00888   \details Ensures the apparent order of the explicit memory operations before
00889            and after the instruction, without ensuring their completion.
00890  */
00891 __STATIC_FORCEINLINE void __DMB(void)
00892 {
00893   __ASM volatile ("dmb 0xF":::"memory");
00894 }
00895 
00896 
/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
{
/* __builtin_bswap32 is available from GCC 4.5; older compilers fall back to
   an explicit REV instruction. */
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  /* No GCC builtin exists for the per-halfword swap, so always use REV16. */
  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
}


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
{
/* __builtin_bswap16 is available from GCC 4.8; older compilers use REVSH. */
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (int16_t)__builtin_bswap16(value);
#else
  int16_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}
00948 
00949 
/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of Bits to rotate
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  /* Rotating by any multiple of 32 is the identity, so reduce the count
     to the range 0..31 first. */
  const uint32_t shift = op2 & 31U;

  /* A zero shift must be short-circuited: (op1 << (32U - 0U)) would shift
     by the full word width, which is undefined behaviour in C. */
  if (shift == 0U)
  {
    return op1;
  }

  return (op1 >> shift) | (op1 << (32U - shift));
}
00966 
00967 
/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
  \note    'value' is pasted into the mnemonic with the stringize operator (#value),
           so it must be a plain integer literal — not a variable or expression.
 */
#define __BKPT(value)                       __ASM volatile ("bkpt "#value)
00976 
00977 
/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

/* The RBIT instruction only exists on ARMv7-M / v7E-M / v8-M Mainline;
   other targets use the portable bit-loop below. */
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
   __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */

  result = value;                      /* r will be reversed bits of v; first get LSB of v */
  /* Shift each remaining bit of 'value' into 'result' LSB-first; the loop
     exits early once only zero bits remain, and 's' counts how many shift
     positions were skipped so they can be applied in one go afterwards. */
  for (value >>= 1U; value != 0U; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s;                        /* shift when v's highest bits are zero */
#endif
  return result;
}
01006 
01007 
/**
  \brief   Count leading zeros
  \details Counts the number of leading zeros of a data value.
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value (32 when value == 0)
 */
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
     __builtin_clz(0) is undefined behaviour, so handle this case specially.
     The ARM CLZ instruction itself returns 32 for a zero input, so this
     guard makes the intrinsic match the hardware result unconditionally.
     (This mirrors the fix applied in later upstream CMSIS releases.) */
  if (value == 0U)
  {
    return 32U;
  }
  return (uint8_t)__builtin_clz(value);
}
01015 
01016 
01017 #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
01018      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
01019      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
01020      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/* Exclusive-access intrinsics (LDREX*/STREX*/CLREX). Availability is gated by
   the architecture #if above; a STREX* result of 0 means the store took place,
   1 means the exclusive monitor rejected it and the LDREX/STREX sequence must
   be retried. */

/**
  \brief   LDR Exclusive (8 bit)
  \details Executes a exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   /* "Q" constrains the operand to a single-register memory reference,
      which LDREXB requires. */
   __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by assembler. So has to use following less efficient pattern.
    */
   __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint8_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes a exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by assembler. So has to use following less efficient pattern.
    */
   __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint16_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes a exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
    uint32_t result;

   __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
   return(result);
}


/**
  \brief   STR Exclusive (8 bit)
  \details Executes a exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
   uint32_t result;

   /* "=&r" (early-clobber) keeps the status register distinct from the
      input operands. */
   __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
   return(result);
}


/**
  \brief   STR Exclusive (16 bit)
  \details Executes a exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
   uint32_t result;

   __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
   return(result);
}


/**
  \brief   STR Exclusive (32 bit)
  \details Executes a exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
   uint32_t result;

   __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
   return(result);
}


/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
__STATIC_FORCEINLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}
01139 
01140 #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
01141            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
01142            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
01143            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
01144 
01145 
01146 #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
01147      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
01148      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  ARG1  Value to be saturated
  \param [in]  ARG2  Bit position to saturate to (1..32)
  \return             Saturated value
  \note    ARG2 is bound with the "I" constraint, so it must be a compile-time
           constant usable as an instruction immediate.
 */
#define __SSAT(ARG1,ARG2) \
__extension__ \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  ARG1  Value to be saturated
  \param [in]  ARG2  Bit position to saturate to (0..31)
  \return             Saturated value
  \note    ARG2 must be a compile-time constant ("I" constraint).
 */
#define __USAT(ARG1,ARG2) \
 __extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  /* NOTE(review): the result depends on the current carry flag, which the
     compiler does not track — treat the carry-in as unspecified unless set
     immediately beforehand. */
  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}
01195 
01196 
/* Unprivileged load/store intrinsics (LDR*T / STR*T): these access memory with
   user (unprivileged) permissions even when executed in privileged mode. */

/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes a Unprivileged LDRT instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by assembler. So has to use following less efficient pattern.
    */
   __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
   return ((uint8_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes a Unprivileged LDRT instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by assembler. So has to use following less efficient pattern.
    */
   __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
   return ((uint16_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes a Unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
   return(result);
}


/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes a Unprivileged STRT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
   __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes a Unprivileged STRT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
   __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes a Unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
   __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
01290 
01291 #else  /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
01292            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
01293            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
01294 
/**
  \brief   Signed Saturate
  \details Saturates a signed value (portable C fallback; no SSAT instruction
           on this architecture).
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  /* Widths outside 1..32 leave the input value untouched. */
  if ((sat < 1U) || (sat > 32U))
  {
    return val;
  }

  /* Largest and smallest values representable in a signed 'sat'-bit field. */
  const int32_t upper = (int32_t)((1U << (sat - 1U)) - 1U);
  const int32_t lower = -upper - 1;

  if (val > upper)
  {
    return upper;
  }
  if (val < lower)
  {
    return lower;
  }
  return val;
}
01319 
/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value (portable C fallback; no USAT
           instruction on this architecture).
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  /* Widths above 31 return the input reinterpreted as unsigned. */
  if (sat > 31U)
  {
    return (uint32_t)val;
  }

  /* Largest value representable in an unsigned 'sat'-bit field. */
  const uint32_t limit = (1U << sat) - 1U;

  if (val < 0)
  {
    return 0U;                 /* negative inputs clamp to zero */
  }
  if ((uint32_t)val > limit)
  {
    return limit;
  }
  return (uint32_t)val;
}
01343 
01344 #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
01345            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
01346            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
01347 
01348 
01349 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
01350      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/* ARMv8-M load-acquire / store-release intrinsics. The acquire/release
   semantics are provided by the instructions themselves; as with the LDREX
   family, a *EX store result of 0 means success, 1 means retry. */

/**
  \brief   Load-Acquire (8 bit)
  \details Executes a LDAB instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
   return ((uint8_t) result);
}


/**
  \brief   Load-Acquire (16 bit)
  \details Executes a LDAH instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
   return ((uint16_t) result);
}


/**
  \brief   Load-Acquire (32 bit)
  \details Executes a LDA instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
    uint32_t result;

   __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
   return(result);
}


/**
  \brief   Store-Release (8 bit)
  \details Executes a STLB instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
   __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Store-Release (16 bit)
  \details Executes a STLH instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
   __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Store-Release (32 bit)
  \details Executes a STL instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
   __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes a LDAB exclusive instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) );
   return ((uint8_t) result);
}


/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes a LDAH exclusive instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) );
   return ((uint16_t) result);
}


/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes a LDA exclusive instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) );
   return(result);
}


/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes a STLB exclusive instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
{
   uint32_t result;

   __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
   return(result);
}


/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes a STLH exclusive instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
{
   uint32_t result;

   __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
   return(result);
}


/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes a STL exclusive instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
{
   uint32_t result;

   __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
   return(result);
}
01526 
01527 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
01528            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
01529 
01530 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
01531 
01532 
01533 /* ###################  Compiler specific Intrinsics  ########################### */
01534 /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
01535   Access to dedicated SIMD instructions
01536   @{
01537 */
01538 
01539 #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
01540 
/* SIMD byte-lane arithmetic (DSP extension). Each intrinsic wraps the
   identically-named ARM instruction, operating independently on the four
   8-bit lanes of its 32-bit operands. */

/** \brief SADD8: per-byte signed addition. */
__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief QADD8: per-byte saturating signed addition. */
__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SHADD8: per-byte signed addition, result halved. */
__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UADD8: per-byte unsigned addition. */
__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UQADD8: per-byte saturating unsigned addition. */
__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UHADD8: per-byte unsigned addition, result halved. */
__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


/** \brief SSUB8: per-byte signed subtraction (op1 - op2). */
__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief QSUB8: per-byte saturating signed subtraction. */
__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SHSUB8: per-byte signed subtraction, result halved. */
__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief USUB8: per-byte unsigned subtraction. */
__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UQSUB8: per-byte saturating unsigned subtraction. */
__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UHSUB8: per-byte unsigned subtraction, result halved. */
__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
01637 
01638 
/* SIMD halfword-lane arithmetic: same instruction family as the byte
   variants above, operating on the two 16-bit lanes of each operand. */

/** \brief SADD16: per-halfword signed addition. */
__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief QADD16: per-halfword saturating signed addition. */
__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SHADD16: per-halfword signed addition, result halved. */
__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UADD16: per-halfword unsigned addition. */
__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UQADD16: per-halfword saturating unsigned addition. */
__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UHADD16: per-halfword unsigned addition, result halved. */
__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SSUB16: per-halfword signed subtraction (op1 - op2). */
__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief QSUB16: per-halfword saturating signed subtraction. */
__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SHSUB16: per-halfword signed subtraction, result halved. */
__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief USUB16: per-halfword unsigned subtraction. */
__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UQSUB16: per-halfword saturating unsigned subtraction. */
__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UHSUB16: per-halfword unsigned subtraction, result halved. */
__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
01734 
/* SIMD exchange operations: ASX = add on the top halfword / subtract on the
   bottom with operand halfwords exchanged; SAX = the subtract/add mirror. */

/** \brief SASX: signed add/subtract with halfword exchange. */
__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief QASX: saturating signed add/subtract with exchange. */
__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SHASX: signed halving add/subtract with exchange. */
__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UASX: unsigned add/subtract with exchange. */
__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UQASX: saturating unsigned add/subtract with exchange. */
__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UHASX: unsigned halving add/subtract with exchange. */
__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SSAX: signed subtract/add with halfword exchange. */
__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief QSAX: saturating signed subtract/add with exchange. */
__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SHSAX: signed halving subtract/add with exchange. */
__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief USAX: unsigned subtract/add with exchange. */
__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UQSAX: saturating unsigned subtract/add with exchange. */
__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UHSAX: unsigned halving subtract/add with exchange. */
__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
01830 
01831 __STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
01832 {
01833   uint32_t result;
01834 
01835   __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
01836   return(result);
01837 }
01838 
01839 __STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
01840 {
01841   uint32_t result;
01842 
01843   __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
01844   return(result);
01845 }
01846 
/* Statement-expression macro wrapping the ARM SSAT16 instruction (dual 16-bit
   signed saturate).  Implemented as a macro, not a function, because the
   saturation bit position ARG2 must be an assemble-time immediate ("I"
   constraint).  ARG1 is evaluated exactly once via __ARG1. */
#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
01853 
/* Statement-expression macro wrapping the ARM USAT16 instruction (dual 16-bit
   unsigned saturate).  Kept as a macro so the saturation bound ARG2 can be an
   immediate ("I" constraint); ARG1 is evaluated exactly once via __ARG1. */
#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
01860 
01861 __STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
01862 {
01863   uint32_t result;
01864 
01865   __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
01866   return(result);
01867 }
01868 
01869 __STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
01870 {
01871   uint32_t result;
01872 
01873   __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
01874   return(result);
01875 }
01876 
01877 __STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
01878 {
01879   uint32_t result;
01880 
01881   __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
01882   return(result);
01883 }
01884 
01885 __STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
01886 {
01887   uint32_t result;
01888 
01889   __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
01890   return(result);
01891 }
01892 
01893 __STATIC_FORCEINLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
01894 {
01895   uint32_t result;
01896 
01897   __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
01898   return(result);
01899 }
01900 
01901 __STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
01902 {
01903   uint32_t result;
01904 
01905   __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
01906   return(result);
01907 }
01908 
01909 __STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
01910 {
01911   uint32_t result;
01912 
01913   __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
01914   return(result);
01915 }
01916 
01917 __STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
01918 {
01919   uint32_t result;
01920 
01921   __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
01922   return(result);
01923 }
01924 
/**
  \brief   Wraps the ARM SMLALD instruction (dual 16-bit signed multiply,
           products added to a 64-bit accumulator).
  \param [in] op1  first operand, two packed signed halfwords
  \param [in] op2  second operand, two packed signed halfwords
  \param [in] acc  64-bit accumulator value
  \return          updated 64-bit accumulator

  The union splits the 64-bit accumulator into the two 32-bit registers the
  instruction requires; the "0"/"1" input constraints tie them to the output
  registers so the accumulator is read and written in place.  Word order
  within the union depends on endianness, hence the __ARMEB__ switch.
 */
__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
01941 
/**
  \brief   Wraps the ARM SMLALDX instruction (dual 16-bit signed multiply with
           exchange, products added to a 64-bit accumulator).
  \param [in] op1  first operand, two packed signed halfwords
  \param [in] op2  second operand, two packed signed halfwords (halfwords exchanged)
  \param [in] acc  64-bit accumulator value
  \return          updated 64-bit accumulator

  Same union/tied-operand technique as __SMLALD: the accumulator occupies two
  32-bit registers whose order depends on endianness (__ARMEB__).
 */
__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
01958 
01959 __STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
01960 {
01961   uint32_t result;
01962 
01963   __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
01964   return(result);
01965 }
01966 
01967 __STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
01968 {
01969   uint32_t result;
01970 
01971   __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
01972   return(result);
01973 }
01974 
01975 __STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
01976 {
01977   uint32_t result;
01978 
01979   __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
01980   return(result);
01981 }
01982 
01983 __STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
01984 {
01985   uint32_t result;
01986 
01987   __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
01988   return(result);
01989 }
01990 
/**
  \brief   Wraps the ARM SMLSLD instruction (dual 16-bit signed multiply,
           product difference added to a 64-bit accumulator).
  \param [in] op1  first operand, two packed signed halfwords
  \param [in] op2  second operand, two packed signed halfwords
  \param [in] acc  64-bit accumulator value
  \return          updated 64-bit accumulator

  The union maps the 64-bit accumulator onto the two 32-bit registers the
  instruction uses; "0"/"1" constraints tie inputs to outputs so the value is
  updated in place.  Register/word order follows endianness (__ARMEB__).
 */
__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
02007 
/**
  \brief   Wraps the ARM SMLSLDX instruction (dual 16-bit signed multiply with
           exchange, product difference added to a 64-bit accumulator).
  \param [in] op1  first operand, two packed signed halfwords
  \param [in] op2  second operand, two packed signed halfwords (halfwords exchanged)
  \param [in] acc  64-bit accumulator value
  \return          updated 64-bit accumulator

  Same union/tied-operand technique as __SMLSLD; word order within the union
  depends on endianness (__ARMEB__).
 */
__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
02024 
02025 __STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
02026 {
02027   uint32_t result;
02028 
02029   __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
02030   return(result);
02031 }
02032 
02033 __STATIC_FORCEINLINE  int32_t __QADD( int32_t op1,  int32_t op2)
02034 {
02035   int32_t result;
02036 
02037   __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
02038   return(result);
02039 }
02040 
02041 __STATIC_FORCEINLINE  int32_t __QSUB( int32_t op1,  int32_t op2)
02042 {
02043   int32_t result;
02044 
02045   __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
02046   return(result);
02047 }
02048 
/* Disabled reference implementations of __PKHBT/__PKHTB that emit the PKHBT/
   PKHTB instructions directly via inline asm.  Kept for documentation only;
   the active definitions below build the same packed result in plain C,
   which lets the compiler fold constant arguments. */
#if 0
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
#endif
02067 
/* Pack halfwords in plain C (instead of the PKHBT/PKHTB instructions):
   __PKHBT: bottom 16 bits of ARG1 | (ARG2 << ARG3) masked to the top 16 bits.
   __PKHTB: top 16 bits of ARG1    | (ARG2 >> ARG3) masked to the bottom 16 bits.
   NOTE(review): ARG2 and ARG3 are evaluated as written, so side-effecting
   arguments are expanded verbatim, and a shift count outside 0..31 is
   undefined behavior in C — callers are expected to pass instruction-legal
   shift amounts; confirm against call sites. */
#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )
02073 
02074 __STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
02075 {
02076  int32_t result;
02077 
02078  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
02079  return(result);
02080 }
02081 
02082 #endif /* (__ARM_FEATURE_DSP == 1) */
02083 /*@} end of group CMSIS_SIMD_intrinsics */
02084 
02085 
02086 #pragma GCC diagnostic pop
02087 
02088 #endif /* __CMSIS_GCC_H */
02089