Arrow / Mbed OS DAPLink Reset
Embed: (wiki syntax)

« Back to documentation index

cmsis_gcc.h Source File (line numbers shown inline; use "Show/hide line numbers" to toggle)

cmsis_gcc.h

Go to the documentation of this file.
00001 /**************************************************************************//**
00002  * @file     cmsis_gcc.h
00003  * @brief    CMSIS compiler GCC header file
00004  * @version  V5.2.1
00005  * @date     30. July 2019
00006  ******************************************************************************/
00007 /*
00008  * Copyright (c) 2009-2019 Arm Limited. All rights reserved.
00009  *
00010  * SPDX-License-Identifier: Apache-2.0
00011  *
00012  * Licensed under the Apache License, Version 2.0 (the License); you may
00013  * not use this file except in compliance with the License.
00014  * You may obtain a copy of the License at
00015  *
00016  * www.apache.org/licenses/LICENSE-2.0
00017  *
00018  * Unless required by applicable law or agreed to in writing, software
00019  * distributed under the License is distributed on an AS IS BASIS, WITHOUT
00020  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00021  * See the License for the specific language governing permissions and
00022  * limitations under the License.
00023  */
00024 
00025 #ifndef __CMSIS_GCC_H
00026 #define __CMSIS_GCC_H
00027 
00028 /* ignore some GCC warnings */
/* Matching "#pragma GCC diagnostic pop" is expected at the end of this header
   (not visible in this excerpt) — the suppressions apply to the whole file. */
00029 #pragma GCC diagnostic push
00030 #pragma GCC diagnostic ignored "-Wsign-conversion"
00031 #pragma GCC diagnostic ignored "-Wconversion"
00032 #pragma GCC diagnostic ignored "-Wunused-parameter"
00033 
00034 /* Fallback for __has_builtin */
/* On compilers without __has_builtin, treat every builtin as unavailable so
   the #if __has_builtin(...) checks below select the inline-asm fallbacks. */
00035 #ifndef __has_builtin
00036   #define __has_builtin(x) (0)
00037 #endif
00038 
00039 /* CMSIS compiler specific defines */
/* Every macro is guarded with #ifndef so a project (or another CMSIS compiler
   header) may pre-define its own variant before including this file. */
00040 #ifndef   __ASM
00041   #define __ASM                                  __asm
00042 #endif
00043 #ifndef   __INLINE
00044   #define __INLINE                               inline
00045 #endif
00046 #ifndef   __STATIC_INLINE
00047   #define __STATIC_INLINE                        static inline
00048 #endif
00049 #ifndef   __STATIC_FORCEINLINE
00050   #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static inline
00051 #endif
00052 #ifndef   __NO_RETURN
00053   #define __NO_RETURN                            __attribute__((__noreturn__))
00054 #endif
00055 #ifndef   __USED
00056   #define __USED                                 __attribute__((used))
00057 #endif
00058 #ifndef   __WEAK
00059   #define __WEAK                                 __attribute__((weak))
00060 #endif
00061 #ifndef   __PACKED
00062   #define __PACKED                               __attribute__((packed, aligned(1)))
00063 #endif
00064 #ifndef   __PACKED_STRUCT
00065   #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
00066 #endif
00067 #ifndef   __PACKED_UNION
00068   #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
00069 #endif
/* The T_UINT* helper structs below are packed single-member wrappers; the
   packed attribute drops their alignment to 1 so the compiler emits code
   that is safe for unaligned addresses when accessing ->v. */
00070 #ifndef   __UNALIGNED_UINT32        /* deprecated */
00071   #pragma GCC diagnostic push
00072   #pragma GCC diagnostic ignored "-Wpacked"
00073   #pragma GCC diagnostic ignored "-Wattributes"
00074   struct __attribute__((packed)) T_UINT32 { uint32_t v; };
00075   #pragma GCC diagnostic pop
00076   #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
00077 #endif
00078 #ifndef   __UNALIGNED_UINT16_WRITE
00079   #pragma GCC diagnostic push
00080   #pragma GCC diagnostic ignored "-Wpacked"
00081   #pragma GCC diagnostic ignored "-Wattributes"
00082   __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
00083   #pragma GCC diagnostic pop
00084   #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
00085 #endif
00086 #ifndef   __UNALIGNED_UINT16_READ
00087   #pragma GCC diagnostic push
00088   #pragma GCC diagnostic ignored "-Wpacked"
00089   #pragma GCC diagnostic ignored "-Wattributes"
00090   __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
00091   #pragma GCC diagnostic pop
00092   #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
00093 #endif
00094 #ifndef   __UNALIGNED_UINT32_WRITE
00095   #pragma GCC diagnostic push
00096   #pragma GCC diagnostic ignored "-Wpacked"
00097   #pragma GCC diagnostic ignored "-Wattributes"
00098   __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
00099   #pragma GCC diagnostic pop
00100   #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
00101 #endif
00102 #ifndef   __UNALIGNED_UINT32_READ
00103   #pragma GCC diagnostic push
00104   #pragma GCC diagnostic ignored "-Wpacked"
00105   #pragma GCC diagnostic ignored "-Wattributes"
00106   __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
00107   #pragma GCC diagnostic pop
00108   #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
00109 #endif
00110 #ifndef   __ALIGNED
00111   #define __ALIGNED(x)                           __attribute__((aligned(x)))
00112 #endif
00113 #ifndef   __RESTRICT
00114   #define __RESTRICT                             __restrict
00115 #endif
/* Empty volatile asm with a "memory" clobber: forbids the compiler from
   moving memory accesses across this point (no instruction is emitted). */
00116 #ifndef   __COMPILER_BARRIER
00117   #define __COMPILER_BARRIER()                   __ASM volatile("":::"memory")
00118 #endif
00119 
00120 /* #########################  Startup and Lowlevel Init  ######################## */
00121 
00122 #ifndef __PROGRAM_START
00123 
00124 /**
00125   \brief   Initializes data and bss sections
00126   \details This default implementation initializes all data and additional bss
00127            sections relying on .copy.table and .zero.table specified properly
00128            in the used linker script.
00129   
00130  */
00131 __STATIC_FORCEINLINE __NO_RETURN void __cmsis_start(void)
00132 {
00133   extern void _start(void) __NO_RETURN;
00134   
  /* One entry per region to copy from the load address to RAM;
     wlen is the region length in 32-bit words. */
00135   typedef struct {
00136     uint32_t const* src;
00137     uint32_t* dest;
00138     uint32_t  wlen;
00139   } __copy_table_t;
00140   
  /* One entry per region to zero-initialize; wlen again counts 32-bit words. */
00141   typedef struct {
00142     uint32_t* dest;
00143     uint32_t  wlen;
00144   } __zero_table_t;
00145   
  /* Table boundary symbols are provided by the linker script
     (.copy.table / .zero.table sections). */
00146   extern const __copy_table_t __copy_table_start__;
00147   extern const __copy_table_t __copy_table_end__;
00148   extern const __zero_table_t __zero_table_start__;
00149   extern const __zero_table_t __zero_table_end__;
00150 
  /* Copy all initialized-data regions word by word. */
00151   for (__copy_table_t const* pTable = &__copy_table_start__; pTable < &__copy_table_end__; ++pTable) {
00152     for(uint32_t i=0u; i<pTable->wlen; ++i) {
00153       pTable->dest[i] = pTable->src[i];
00154     }
00155   }
00156  
  /* Clear all bss-style regions. */
00157   for (__zero_table_t const* pTable = &__zero_table_start__; pTable < &__zero_table_end__; ++pTable) {
00158     for(uint32_t i=0u; i<pTable->wlen; ++i) {
00159       pTable->dest[i] = 0u;
00160     }
00161   }
00162  
  /* Hand over to the C library entry point; declared __NO_RETURN above,
     so execution never comes back here. */
00163   _start();
00164 }
00165   
00166 #define __PROGRAM_START           __cmsis_start
00167 #endif
00168 
/* Default linker-script symbol names consumed by the generic startup code.
   A device header may pre-define any of these before including this file
   to override the defaults. */

#ifndef __INITIAL_SP
#define __INITIAL_SP              __StackTop
#endif

#ifndef __STACK_LIMIT
#define __STACK_LIMIT             __StackLimit
#endif

#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE            __Vectors
#endif

#ifndef __VECTOR_TABLE_ATTRIBUTE
/* Fix: use the documented, reserved '__attribute__' spelling (the bare
   '__attribute' form relied on an undocumented GCC alias).  'used' keeps the
   vector table from being discarded by the linker's section garbage
   collection; 'section(".vectors")' places it where the linker script
   expects it. */
#define __VECTOR_TABLE_ATTRIBUTE  __attribute__((used, section(".vectors")))
#endif
00184 
00185 /* ###########################  Core Function Access  ########################### */
00186 /** \ingroup  CMSIS_Core_FunctionInterface
00187     \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
00188   @{
00189  */
00190 
/*
 * Implementation note: every accessor in this group is a thin wrapper around
 * a single MRS (read) or MSR (write) of the named special register, or a CPS
 * instruction for the interrupt enables.  The *_NS variants access the
 * non-secure register aliases and are only compiled when the translation
 * unit is built in the secure state (__ARM_FEATURE_CMSE == 3).  The setters
 * that affect interrupt masking carry a "memory" clobber so the compiler
 * does not reorder memory accesses across the masking change.
 */
00191 /**
00192   \brief   Enable IRQ Interrupts
00193   \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
00194            Can only be executed in Privileged modes.
00195  */
00196 __STATIC_FORCEINLINE void __enable_irq(void)
00197 {
00198   __ASM volatile ("cpsie i" : : : "memory");
00199 }
00200 
00201 
00202 /**
00203   \brief   Disable IRQ Interrupts
00204   \details Disables IRQ interrupts by setting the I-bit in the CPSR.
00205            Can only be executed in Privileged modes.
00206  */
00207 __STATIC_FORCEINLINE void __disable_irq(void)
00208 {
00209   __ASM volatile ("cpsid i" : : : "memory");
00210 }
00211 
00212 
00213 /**
00214   \brief   Get Control Register
00215   \details Returns the content of the Control Register.
00216   \return               Control Register value
00217  */
00218 __STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
00219 {
00220   uint32_t result;
00221 
00222   __ASM volatile ("MRS %0, control" : "=r" (result) );
00223   return(result);
00224 }
00225 
00226 
00227 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00228 /**
00229   \brief   Get Control Register (non-secure)
00230   \details Returns the content of the non-secure Control Register when in secure mode.
00231   \return               non-secure Control Register value
00232  */
00233 __STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
00234 {
00235   uint32_t result;
00236 
00237   __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
00238   return(result);
00239 }
00240 #endif
00241 
00242 
00243 /**
00244   \brief   Set Control Register
00245   \details Writes the given value to the Control Register.
00246   \param [in]    control  Control Register value to set
00247  */
00248 __STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
00249 {
00250   __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
00251 }
00252 
00253 
00254 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00255 /**
00256   \brief   Set Control Register (non-secure)
00257   \details Writes the given value to the non-secure Control Register when in secure state.
00258   \param [in]    control  Control Register value to set
00259  */
00260 __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
00261 {
00262   __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
00263 }
00264 #endif
00265 
00266 
00267 /**
00268   \brief   Get IPSR Register
00269   \details Returns the content of the IPSR Register.
00270   \return               IPSR Register value
00271  */
00272 __STATIC_FORCEINLINE uint32_t __get_IPSR(void)
00273 {
00274   uint32_t result;
00275 
00276   __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
00277   return(result);
00278 }
00279 
00280 
00281 /**
00282   \brief   Get APSR Register
00283   \details Returns the content of the APSR Register.
00284   \return               APSR Register value
00285  */
00286 __STATIC_FORCEINLINE uint32_t __get_APSR(void)
00287 {
00288   uint32_t result;
00289 
00290   __ASM volatile ("MRS %0, apsr" : "=r" (result) );
00291   return(result);
00292 }
00293 
00294 
00295 /**
00296   \brief   Get xPSR Register
00297   \details Returns the content of the xPSR Register.
00298   \return               xPSR Register value
00299  */
00300 __STATIC_FORCEINLINE uint32_t __get_xPSR(void)
00301 {
00302   uint32_t result;
00303 
00304   __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
00305   return(result);
00306 }
00307 
00308 
00309 /**
00310   \brief   Get Process Stack Pointer
00311   \details Returns the current value of the Process Stack Pointer (PSP).
00312   \return               PSP Register value
00313  */
00314 __STATIC_FORCEINLINE uint32_t __get_PSP(void)
00315 {
00316   uint32_t result;
00317 
00318   __ASM volatile ("MRS %0, psp"  : "=r" (result) );
00319   return(result);
00320 }
00321 
00322 
00323 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00324 /**
00325   \brief   Get Process Stack Pointer (non-secure)
00326   \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
00327   \return               PSP Register value
00328  */
00329 __STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
00330 {
00331   uint32_t result;
00332 
00333   __ASM volatile ("MRS %0, psp_ns"  : "=r" (result) );
00334   return(result);
00335 }
00336 #endif
00337 
00338 
00339 /**
00340   \brief   Set Process Stack Pointer
00341   \details Assigns the given value to the Process Stack Pointer (PSP).
00342   \param [in]    topOfProcStack  Process Stack Pointer value to set
00343  */
  /* NOTE(review): the stack-pointer setters intentionally carry no "memory"
     clobber in upstream CMSIS, unlike the CONTROL/PRIMASK setters. */
00344 __STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
00345 {
00346   __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
00347 }
00348 
00349 
00350 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00351 /**
00352   \brief   Set Process Stack Pointer (non-secure)
00353   \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
00354   \param [in]    topOfProcStack  Process Stack Pointer value to set
00355  */
00356 __STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
00357 {
00358   __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
00359 }
00360 #endif
00361 
00362 
00363 /**
00364   \brief   Get Main Stack Pointer
00365   \details Returns the current value of the Main Stack Pointer (MSP).
00366   \return               MSP Register value
00367  */
00368 __STATIC_FORCEINLINE uint32_t __get_MSP(void)
00369 {
00370   uint32_t result;
00371 
00372   __ASM volatile ("MRS %0, msp" : "=r" (result) );
00373   return(result);
00374 }
00375 
00376 
00377 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00378 /**
00379   \brief   Get Main Stack Pointer (non-secure)
00380   \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
00381   \return               MSP Register value
00382  */
00383 __STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
00384 {
00385   uint32_t result;
00386 
00387   __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
00388   return(result);
00389 }
00390 #endif
00391 
00392 
00393 /**
00394   \brief   Set Main Stack Pointer
00395   \details Assigns the given value to the Main Stack Pointer (MSP).
00396   \param [in]    topOfMainStack  Main Stack Pointer value to set
00397  */
00398 __STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
00399 {
00400   __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
00401 }
00402 
00403 
00404 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00405 /**
00406   \brief   Set Main Stack Pointer (non-secure)
00407   \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
00408   \param [in]    topOfMainStack  Main Stack Pointer value to set
00409  */
00410 __STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
00411 {
00412   __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
00413 }
00414 #endif
00415 
00416 
00417 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00418 /**
00419   \brief   Get Stack Pointer (non-secure)
00420   \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
00421   \return               SP Register value
00422  */
00423 __STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
00424 {
00425   uint32_t result;
00426 
00427   __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
00428   return(result);
00429 }
00430 
00431 
00432 /**
00433   \brief   Set Stack Pointer (non-secure)
00434   \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
00435   \param [in]    topOfStack  Stack Pointer value to set
00436  */
00437 __STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
00438 {
00439   __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
00440 }
00441 #endif
00442 
00443 
00444 /**
00445   \brief   Get Priority Mask
00446   \details Returns the current state of the priority mask bit from the Priority Mask Register.
00447   \return               Priority Mask value
00448  */
00449 __STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
00450 {
00451   uint32_t result;
00452 
00453   __ASM volatile ("MRS %0, primask" : "=r" (result) );
00454   return(result);
00455 }
00456 
00457 
00458 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00459 /**
00460   \brief   Get Priority Mask (non-secure)
00461   \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
00462   \return               Priority Mask value
00463  */
00464 __STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
00465 {
00466   uint32_t result;
00467 
00468   __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
00469   return(result);
00470 }
00471 #endif
00472 
00473 
00474 /**
00475   \brief   Set Priority Mask
00476   \details Assigns the given value to the Priority Mask Register.
00477   \param [in]    priMask  Priority Mask
00478  */
00479 __STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
00480 {
00481   __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
00482 }
00483 
00484 
00485 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00486 /**
00487   \brief   Set Priority Mask (non-secure)
00488   \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
00489   \param [in]    priMask  Priority Mask
00490  */
00491 __STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
00492 {
00493   __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
00494 }
00495 #endif
00496 
00497 
00498 #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
00499      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
00500      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
/* The functions in this region (fault-interrupt enables, BASEPRI, FAULTMASK)
   are compiled only for Armv7-M / Armv7E-M / Armv8-M Mainline targets, per
   the architecture guard above. */
00501 /**
00502   \brief   Enable FIQ
00503   \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
00504            Can only be executed in Privileged modes.
00505  */
00506 __STATIC_FORCEINLINE void __enable_fault_irq(void)
00507 {
00508   __ASM volatile ("cpsie f" : : : "memory");
00509 }
00510 
00511 
00512 /**
00513   \brief   Disable FIQ
00514   \details Disables FIQ interrupts by setting the F-bit in the CPSR.
00515            Can only be executed in Privileged modes.
00516  */
00517 __STATIC_FORCEINLINE void __disable_fault_irq(void)
00518 {
00519   __ASM volatile ("cpsid f" : : : "memory");
00520 }
00521 
00522 
00523 /**
00524   \brief   Get Base Priority
00525   \details Returns the current value of the Base Priority register.
00526   \return               Base Priority register value
00527  */
00528 __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
00529 {
00530   uint32_t result;
00531 
00532   __ASM volatile ("MRS %0, basepri" : "=r" (result) );
00533   return(result);
00534 }
00535 
00536 
00537 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00538 /**
00539   \brief   Get Base Priority (non-secure)
00540   \details Returns the current value of the non-secure Base Priority register when in secure state.
00541   \return               Base Priority register value
00542  */
00543 __STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
00544 {
00545   uint32_t result;
00546 
00547   __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
00548   return(result);
00549 }
00550 #endif
00551 
00552 
00553 /**
00554   \brief   Set Base Priority
00555   \details Assigns the given value to the Base Priority register.
00556   \param [in]    basePri  Base Priority value to set
00557  */
00558 __STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
00559 {
00560   __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
00561 }
00562 
00563 
00564 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00565 /**
00566   \brief   Set Base Priority (non-secure)
00567   \details Assigns the given value to the non-secure Base Priority register when in secure state.
00568   \param [in]    basePri  Base Priority value to set
00569  */
00570 __STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
00571 {
00572   __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
00573 }
00574 #endif
00575 
00576 
00577 /**
00578   \brief   Set Base Priority with condition
00579   \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
00580            or the new value increases the BASEPRI priority level.
00581   \param [in]    basePri  Base Priority value to set
00582  */
00583 __STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
00584 {
00585   __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
00586 }
00587 
00588 
00589 /**
00590   \brief   Get Fault Mask
00591   \details Returns the current value of the Fault Mask register.
00592   \return               Fault Mask register value
00593  */
00594 __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
00595 {
00596   uint32_t result;
00597 
00598   __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
00599   return(result);
00600 }
00601 
00602 
00603 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00604 /**
00605   \brief   Get Fault Mask (non-secure)
00606   \details Returns the current value of the non-secure Fault Mask register when in secure state.
00607   \return               Fault Mask register value
00608  */
00609 __STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
00610 {
00611   uint32_t result;
00612 
00613   __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
00614   return(result);
00615 }
00616 #endif
00617 
00618 
00619 /**
00620   \brief   Set Fault Mask
00621   \details Assigns the given value to the Fault Mask register.
00622   \param [in]    faultMask  Fault Mask value to set
00623  */
00624 __STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
00625 {
00626   __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
00627 }
00628 
00629 
00630 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
00631 /**
00632   \brief   Set Fault Mask (non-secure)
00633   \details Assigns the given value to the non-secure Fault Mask register when in secure state.
00634   \param [in]    faultMask  Fault Mask value to set
00635  */
00636 __STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
00637 {
00638   __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
00639 }
00640 #endif
00641 
00642 #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
00643            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
00644            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
00645 
00646 
00647 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
00648      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
00649 
/* PSPLIM/MSPLIM accessors, Armv8-M only.  On targets without the Main
   Extension built for the non-secure state, the compile-time fallbacks below
   return 0 for reads and discard writes, mirroring the hardware's RAZ/WI
   behaviour of the non-secure limit registers. */
00650 /**
00651   \brief   Get Process Stack Pointer Limit
00652   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00653   Stack Pointer Limit register hence zero is returned always in non-secure
00654   mode.
00655   
00656   \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
00657   \return               PSPLIM Register value
00658  */
00659 __STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
00660 {
00661 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
00662     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
00663     // without main extensions, the non-secure PSPLIM is RAZ/WI
00664   return 0U;
00665 #else
00666   uint32_t result;
00667   __ASM volatile ("MRS %0, psplim"  : "=r" (result) );
00668   return result;
00669 #endif
00670 }
00671 
00672 #if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
00673 /**
00674   \brief   Get Process Stack Pointer Limit (non-secure)
00675   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00676   Stack Pointer Limit register hence zero is returned always.
00677 
00678   \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
00679   \return               PSPLIM Register value
00680  */
00681 __STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
00682 {
00683 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
00684   // without main extensions, the non-secure PSPLIM is RAZ/WI
00685   return 0U;
00686 #else
00687   uint32_t result;
00688   __ASM volatile ("MRS %0, psplim_ns"  : "=r" (result) );
00689   return result;
00690 #endif
00691 }
00692 #endif
00693 
00694 
00695 /**
00696   \brief   Set Process Stack Pointer Limit
00697   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00698   Stack Pointer Limit register hence the write is silently ignored in non-secure
00699   mode.
00700   
00701   \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
00702   \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
00703  */
00704 __STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
00705 {
00706 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
00707     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
00708   // without main extensions, the non-secure PSPLIM is RAZ/WI
00709   (void)ProcStackPtrLimit;
00710 #else
00711   __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
00712 #endif
00713 }
00714 
00715 
00716 #if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
00717 /**
00718   \brief   Set Process Stack Pointer (non-secure)
00719   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00720   Stack Pointer Limit register hence the write is silently ignored.
00721 
00722   \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
00723   \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
00724  */
00725 __STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
00726 {
00727 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
00728   // without main extensions, the non-secure PSPLIM is RAZ/WI
00729   (void)ProcStackPtrLimit;
00730 #else
00731   __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
00732 #endif
00733 }
00734 #endif
00735 
00736 
00737 /**
00738   \brief   Get Main Stack Pointer Limit
00739   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00740   Stack Pointer Limit register hence zero is returned always in non-secure
00741   mode.
00742 
00743   \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
00744   \return               MSPLIM Register value
00745  */
00746 __STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
00747 {
00748 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
00749     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
00750   // without main extensions, the non-secure MSPLIM is RAZ/WI
00751   return 0U;
00752 #else
00753   uint32_t result;
00754   __ASM volatile ("MRS %0, msplim" : "=r" (result) );
00755   return result;
00756 #endif
00757 }
00758 
00759 
00760 #if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
00761 /**
00762   \brief   Get Main Stack Pointer Limit (non-secure)
00763   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00764   Stack Pointer Limit register hence zero is returned always.
00765 
00766   \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state.
00767   \return               MSPLIM Register value
00768  */
00769 __STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
00770 {
00771 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
00772   // without main extensions, the non-secure MSPLIM is RAZ/WI
00773   return 0U;
00774 #else
00775   uint32_t result;
00776   __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
00777   return result;
00778 #endif
00779 }
00780 #endif
00781 
00782 
00783 /**
00784   \brief   Set Main Stack Pointer Limit
00785   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00786   Stack Pointer Limit register hence the write is silently ignored in non-secure
00787   mode.
00788 
00789   \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
00790   \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
00791  */
00792 __STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
00793 {
00794 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
00795     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
00796   // without main extensions, the non-secure MSPLIM is RAZ/WI
00797   (void)MainStackPtrLimit;
00798 #else
00799   __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
00800 #endif
00801 }
00802 
00803 
00804 #if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
00805 /**
00806   \brief   Set Main Stack Pointer Limit (non-secure)
00807   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
00808   Stack Pointer Limit register hence the write is silently ignored.
00809 
00810   \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
00811   \param [in]    MainStackPtrLimit  Main Stack Pointer value to set
00812  */
00813 __STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
00814 {
00815 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
00816   // without main extensions, the non-secure MSPLIM is RAZ/WI
00817   (void)MainStackPtrLimit;
00818 #else
00819   __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
00820 #endif
00821 }
00822 #endif
00823 
00824 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
00825            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
00826 
00827 
00828 /**
00829   \brief   Get FPSCR
00830   \details Returns the current value of the Floating Point Status/Control register.
00831   \return               Floating Point Status/Control register value
00832  */
/* When the target has no FPU, or the project does not use it
   (__FPU_PRESENT/__FPU_USED), reads return 0 and writes are discarded;
   otherwise the GCC builtin is preferred over raw VMRS/VMSR where available
   (see the linked GCC bug note below). */
00833 __STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
00834 {
00835 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
00836      (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
00837 #if __has_builtin(__builtin_arm_get_fpscr) 
00838 // Re-enable using built-in when GCC has been fixed
00839 // || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
00840   /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
00841   return __builtin_arm_get_fpscr();
00842 #else
00843   uint32_t result;
00844 
00845   __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
00846   return(result);
00847 #endif
00848 #else
00849   return(0U);
00850 #endif
00851 }
00852 
00853 
00854 /**
00855   \brief   Set FPSCR
00856   \details Assigns the given value to the Floating Point Status/Control register.
00857   \param [in]    fpscr  Floating Point Status/Control value to set
00858  */
00859 __STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
00860 {
00861 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
00862      (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
00863 #if __has_builtin(__builtin_arm_set_fpscr)
00864 // Re-enable using built-in when GCC has been fixed
00865 // || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
00866   /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
00867   __builtin_arm_set_fpscr(fpscr);
00868 #else
00869   __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
00870 #endif
00871 #else
00872   (void)fpscr;
00873 #endif
00874 }
00875 
00876 
00877 /*@} end of CMSIS_Core_RegAccFunctions */
00878 
00879 
00880 /* ##########################  Core Instruction Access  ######################### */
00881 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
00882   Access to dedicated instructions
00883   @{
00884 */
00885 
/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)    /* write-only asm output operand  */
#define __CMSIS_GCC_RW_REG(r) "+l" (r)     /* read-write asm operand         */
#define __CMSIS_GCC_USE_REG(r) "l" (r)     /* read-only asm input operand    */
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP()                             __ASM volatile ("nop")

/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
/* "memory" clobber: forbids caching memory values in registers across the wait */
#define __WFI()                             __ASM volatile ("wfi":::"memory")


/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE()                             __ASM volatile ("wfe":::"memory")


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV()                             __ASM volatile ("sev")
00925 
00926 
/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
__STATIC_FORCEINLINE void __ISB(void)
{
  /* 0xF = SY (full system) barrier option; "memory" clobber also makes this
     a compiler-level reordering barrier */
  __ASM volatile ("isb 0xF":::"memory");
}


/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
__STATIC_FORCEINLINE void __DSB(void)
{
  __ASM volatile ("dsb 0xF":::"memory");
}


/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
__STATIC_FORCEINLINE void __DMB(void)
{
  __ASM volatile ("dmb 0xF":::"memory");
}
00959 
00960 
/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  /* __builtin_bswap32 is available from GCC 4.5 onwards */
  return __builtin_bswap32(value);
#else
  uint32_t result;

  /* older compilers: emit the REV instruction directly */
  __ASM ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}
00978 
00979 
/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  /* REV16 swaps the two bytes of each 16-bit half independently */
  __ASM ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
}
00993 
00994 
/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  /* __builtin_bswap16 is available from GCC 4.8; cast restores signedness */
  return (int16_t)__builtin_bswap16(value);
#else
  int16_t result;

  __ASM ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}
01012 
01013 
01014 /**
01015   \brief   Rotate Right in unsigned value (32 bit)
01016   \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
01017   \param [in]    op1  Value to rotate
01018   \param [in]    op2  Number of Bits to rotate
01019   \return               Rotated value
01020  */
01021 __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
01022 {
01023   op2 %= 32U;
01024   if (op2 == 0U)
01025   {
01026     return op1;
01027   }
01028   return (op1 >> op2) | (op1 << (32U - op2));
01029 }
01030 
01031 
/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
/* #value is stringized into the instruction text, so the argument must be a
   literal constant, not a runtime expression */
#define __BKPT(value)                       __ASM volatile ("bkpt "#value)
01040 
01041 
/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

/* Only ARMv7-M, ARMv7E-M and ARMv8-M mainline provide the RBIT instruction */
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
   __ASM ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  /* Software fallback: shift bits of 'value' out LSB-first into 'result' */
  uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */

  result = value;                      /* r will be reversed bits of v; first get LSB of v */
  for (value >>= 1U; value != 0U; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s;                        /* shift when v's highest bits are zero */
#endif
  return result;
}
01070 
01071 
01072 /**
01073   \brief   Count leading zeros
01074   \details Counts the number of leading zeros of a data value.
01075   \param [in]  value  Value to count the leading zeros
01076   \return             number of leading zeros in value
01077  */
01078 __STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
01079 {
01080   /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
01081      __builtin_clz(0) is undefined behaviour, so handle this case specially.
01082      This guarantees ARM-compatible results if happening to compile on a non-ARM
01083      target, and ensures the compiler doesn't decide to activate any
01084      optimisations using the logic "value was passed to __builtin_clz, so it
01085      is non-zero".
01086      ARM GCC 7.3 and possibly earlier will optimise this test away, leaving a
01087      single CLZ instruction.
01088    */
01089   if (value == 0U)
01090   {
01091     return 32U;
01092   }
01093   return __builtin_clz(value);
01094 }
01095 
01096 
01097 #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
01098      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
01099      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
01100      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes a exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   /* "Q" constrains the operand to a plain [reg] memory reference, which is
      the only addressing mode the exclusive-access instructions accept */
   __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by assembler. So has to use following less efficient pattern.
    */
   __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint8_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes a exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by assembler. So has to use following less efficient pattern.
    */
   __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint16_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes a exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
    uint32_t result;

   __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
   return(result);
}


/**
  \brief   STR Exclusive (8 bit)
  \details Executes a exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
   uint32_t result;

   /* "=&r" (early clobber) keeps the status register distinct from the
      value/address operands, as STREX requires */
   __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
   return(result);
}


/**
  \brief   STR Exclusive (16 bit)
  \details Executes a exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
   uint32_t result;

   __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
   return(result);
}


/**
  \brief   STR Exclusive (32 bit)
  \details Executes a exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
   uint32_t result;

   __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
   return(result);
}


/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
__STATIC_FORCEINLINE void __CLREX(void)
{
  /* "memory" clobber orders CLREX against surrounding memory accesses */
  __ASM volatile ("clrex" ::: "memory");
}
01219 
01220 #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
01221            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
01222            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
01223            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
01224 
01225 
01226 #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
01227      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
01228      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  ARG1  Value to be saturated
  \param [in]  ARG2  Bit position to saturate to (1..32)
  \return             Saturated value
 */
/* Statement-expression macro: the "I" constraint requires ARG2 to be a
   compile-time constant immediate; "cc" is clobbered because the
   instruction updates the condition/status flags. __extension__ silences
   -pedantic about the GNU statement expression. */
#define __SSAT(ARG1, ARG2) \
__extension__ \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  ARG1  Value to be saturated
  \param [in]  ARG2  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT(ARG1, ARG2) \
 __extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })
01259 
01260 
/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  /* register constraints come from the thumb1/thumb2 porting macros above */
  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}
01275 
01276 
/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes a Unprivileged LDRT instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   /* "Q" constrains the operand to a plain [reg] memory reference */
   __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by assembler. So has to use following less efficient pattern.
    */
   __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
   return ((uint8_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes a Unprivileged LDRT instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by assembler. So has to use following less efficient pattern.
    */
   __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
   return ((uint16_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes a Unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
   return(result);
}


/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes a Unprivileged STRT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
   __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes a Unprivileged STRT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
   __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes a Unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
   __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
01370 
01371 #else  /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
01372            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
01373            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
01374 
01375 /**
01376   \brief   Signed Saturate
01377   \details Saturates a signed value.
01378   \param [in]  value  Value to be saturated
01379   \param [in]    sat  Bit position to saturate to (1..32)
01380   \return             Saturated value
01381  */
01382 __STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
01383 {
01384   if ((sat >= 1U) && (sat <= 32U))
01385   {
01386     const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
01387     const int32_t min = -1 - max ;
01388     if (val > max)
01389     {
01390       return max;
01391     }
01392     else if (val < min)
01393     {
01394       return min;
01395     }
01396   }
01397   return val;
01398 }
01399 
01400 /**
01401   \brief   Unsigned Saturate
01402   \details Saturates an unsigned value.
01403   \param [in]  value  Value to be saturated
01404   \param [in]    sat  Bit position to saturate to (0..31)
01405   \return             Saturated value
01406  */
01407 __STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
01408 {
01409   if (sat <= 31U)
01410   {
01411     const uint32_t max = ((1U << sat) - 1U);
01412     if (val > (int32_t)max)
01413     {
01414       return max;
01415     }
01416     else if (val < 0)
01417     {
01418       return 0U;
01419     }
01420   }
01421   return (uint32_t)val;
01422 }
01423 
01424 #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
01425            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
01426            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
01427 
01428 
01429 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
01430      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/**
  \brief   Load-Acquire (8 bit)
  \details Executes a LDAB instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
    uint32_t result;

   /* "memory" clobber makes the load-acquire a compiler ordering point too */
   __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
   return ((uint8_t) result);
}


/**
  \brief   Load-Acquire (16 bit)
  \details Executes a LDAH instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
   return ((uint16_t) result);
}


/**
  \brief   Load-Acquire (32 bit)
  \details Executes a LDA instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
    uint32_t result;

   __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
   return(result);
}


/**
  \brief   Store-Release (8 bit)
  \details Executes a STLB instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
   __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


/**
  \brief   Store-Release (16 bit)
  \details Executes a STLH instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
   __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


/**
  \brief   Store-Release (32 bit)
  \details Executes a STL instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
   __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}
01510 
01511 
/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes a LDAB exclusive instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
{
    uint32_t result;

   /* "memory" clobber makes the acquire a compiler ordering point too */
   __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
   return ((uint8_t) result);
}


/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes a LDAH exclusive instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
   return ((uint16_t) result);
}


/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes a LDA exclusive instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
   return(result);
}


/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes a STLB exclusive instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
{
   uint32_t result;

   /* "=&r" (early clobber) keeps the status register distinct from the
      value/address operands */
   __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
   return(result);
}


/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes a STLH exclusive instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
{
   uint32_t result;

   __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
   return(result);
}


/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes a STL exclusive instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
{
   uint32_t result;

   __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
   return(result);
}
01606 
01607 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
01608            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
01609 
01610 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
01611 
01612 
01613 /* ###################  Compiler specific Intrinsics  ########################### */
01614 /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
01615   Access to dedicated SIMD instructions
01616   @{
01617 */
01618 
01619 #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
01620 
/* GCC inline-asm wrappers for the ARM DSP SIMD instructions. Each 32-bit
   operand holds packed 8-bit or 16-bit lanes that are processed in parallel.
   NOTE(review): some wrappers (sadd8/uadd8, ssub8/usub8, sadd16/uadd16,
   ssub16/usub16, sasx/uasx, ssax) are marked volatile - presumably because
   those instructions also update the APSR.GE flags, so the asm must not be
   optimised away even if the result is unused; confirm against the ARM ARM. */

/* SADD8: signed addition of the four byte lanes */
__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* QADD8: signed saturating addition of the four byte lanes */
__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* SHADD8: signed halving addition of the four byte lanes */
__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* UADD8: unsigned addition of the four byte lanes */
__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* UQADD8: unsigned saturating addition of the four byte lanes */
__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* UHADD8: unsigned halving addition of the four byte lanes */
__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


/* SSUB8: signed subtraction of the four byte lanes */
__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* QSUB8: signed saturating subtraction of the four byte lanes */
__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* SHSUB8: signed halving subtraction of the four byte lanes */
__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* USUB8: unsigned subtraction of the four byte lanes */
__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* UQSUB8: unsigned saturating subtraction of the four byte lanes */
__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* UHSUB8: unsigned halving subtraction of the four byte lanes */
__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


/* SADD16: signed addition of the two halfword lanes */
__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* QADD16: signed saturating addition of the two halfword lanes */
__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* SHADD16: signed halving addition of the two halfword lanes */
__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* UADD16: unsigned addition of the two halfword lanes */
__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* UQADD16: unsigned saturating addition of the two halfword lanes */
__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* UHADD16: unsigned halving addition of the two halfword lanes */
__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* SSUB16: signed subtraction of the two halfword lanes */
__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* QSUB16: signed saturating subtraction of the two halfword lanes */
__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* SHSUB16: signed halving subtraction of the two halfword lanes */
__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* USUB16: unsigned subtraction of the two halfword lanes */
__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* UQSUB16: unsigned saturating subtraction of the two halfword lanes */
__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* UHSUB16: unsigned halving subtraction of the two halfword lanes */
__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* SASX: signed add/subtract with halfword exchange */
__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* QASX: signed saturating add/subtract with halfword exchange */
__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* SHASX: signed halving add/subtract with halfword exchange */
__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* UASX: unsigned add/subtract with halfword exchange */
__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* UQASX: unsigned saturating add/subtract with halfword exchange */
__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* UHASX: unsigned halving add/subtract with halfword exchange */
__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* SSAX: signed subtract/add with halfword exchange */
__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* QSAX: signed saturating subtract/add with halfword exchange */
__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
01878 
01879 __STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
01880 {
01881   uint32_t result;
01882 
01883   __ASM ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
01884   return(result);
01885 }
01886 
01887 __STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
01888 {
01889   uint32_t result;
01890 
01891   __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
01892   return(result);
01893 }
01894 
01895 __STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
01896 {
01897   uint32_t result;
01898 
01899   __ASM ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
01900   return(result);
01901 }
01902 
01903 __STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
01904 {
01905   uint32_t result;
01906 
01907   __ASM ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
01908   return(result);
01909 }
01910 
01911 __STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
01912 {
01913   uint32_t result;
01914 
01915   __ASM ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
01916   return(result);
01917 }
01918 
01919 __STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
01920 {
01921   uint32_t result;
01922 
01923   __ASM ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
01924   return(result);
01925 }
01926 
/* Saturate each signed 16-bit halfword of ARG1 to ARG2 bits (Arm SSAT16 instruction).
 * ARG2 must be a compile-time constant: it feeds the asm "I" immediate constraint. */
#define __SSAT16(ARG1, ARG2) \
({ \
  int32_t __res, __a1 = (ARG1); \
  __ASM volatile ("ssat16 %0, %1, %2" : "=r" (__res) :  "I" (ARG2), "r" (__a1) : "cc" ); \
  __res; \
 })
01933 
/* Saturate each 16-bit halfword of ARG1 to an unsigned ARG2-bit range (Arm USAT16 instruction).
 * ARG2 must be a compile-time constant: it feeds the asm "I" immediate constraint. */
#define __USAT16(ARG1, ARG2) \
({ \
  uint32_t __res, __a1 = (ARG1); \
  __ASM volatile ("usat16 %0, %1, %2" : "=r" (__res) :  "I" (ARG2), "r" (__a1) : "cc" ); \
  __res; \
 })
01940 
01941 __STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
01942 {
01943   uint32_t result;
01944 
01945   __ASM ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
01946   return(result);
01947 }
01948 
01949 __STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
01950 {
01951   uint32_t result;
01952 
01953   __ASM ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
01954   return(result);
01955 }
01956 
01957 __STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
01958 {
01959   uint32_t result;
01960 
01961   __ASM ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
01962   return(result);
01963 }
01964 
01965 __STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
01966 {
01967   uint32_t result;
01968 
01969   __ASM ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
01970   return(result);
01971 }
01972 
01973 __STATIC_FORCEINLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
01974 {
01975   uint32_t result;
01976 
01977   __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
01978   return(result);
01979 }
01980 
01981 __STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
01982 {
01983   uint32_t result;
01984 
01985   __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
01986   return(result);
01987 }
01988 
01989 __STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
01990 {
01991   uint32_t result;
01992 
01993   __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
01994   return(result);
01995 }
01996 
01997 __STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
01998 {
01999   uint32_t result;
02000 
02001   __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
02002   return(result);
02003 }
02004 
02005 __STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
02006 {
02007   union llreg_u{
02008     uint32_t w32[2];
02009     uint64_t w64;
02010   } llr;
02011   llr.w64 = acc;
02012 
02013 #ifndef __ARMEB__   /* Little endian */
02014   __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
02015 #else               /* Big endian */
02016   __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
02017 #endif
02018 
02019   return(llr.w64);
02020 }
02021 
02022 __STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
02023 {
02024   union llreg_u{
02025     uint32_t w32[2];
02026     uint64_t w64;
02027   } llr;
02028   llr.w64 = acc;
02029 
02030 #ifndef __ARMEB__   /* Little endian */
02031   __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
02032 #else               /* Big endian */
02033   __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
02034 #endif
02035 
02036   return(llr.w64);
02037 }
02038 
02039 __STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
02040 {
02041   uint32_t result;
02042 
02043   __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
02044   return(result);
02045 }
02046 
02047 __STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
02048 {
02049   uint32_t result;
02050 
02051   __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
02052   return(result);
02053 }
02054 
02055 __STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
02056 {
02057   uint32_t result;
02058 
02059   __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
02060   return(result);
02061 }
02062 
02063 __STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
02064 {
02065   uint32_t result;
02066 
02067   __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
02068   return(result);
02069 }
02070 
02071 __STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
02072 {
02073   union llreg_u{
02074     uint32_t w32[2];
02075     uint64_t w64;
02076   } llr;
02077   llr.w64 = acc;
02078 
02079 #ifndef __ARMEB__   /* Little endian */
02080   __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
02081 #else               /* Big endian */
02082   __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
02083 #endif
02084 
02085   return(llr.w64);
02086 }
02087 
02088 __STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
02089 {
02090   union llreg_u{
02091     uint32_t w32[2];
02092     uint64_t w64;
02093   } llr;
02094   llr.w64 = acc;
02095 
02096 #ifndef __ARMEB__   /* Little endian */
02097   __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
02098 #else               /* Big endian */
02099   __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
02100 #endif
02101 
02102   return(llr.w64);
02103 }
02104 
02105 __STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
02106 {
02107   uint32_t result;
02108 
02109   __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
02110   return(result);
02111 }
02112 
02113 __STATIC_FORCEINLINE  int32_t __QADD( int32_t op1,  int32_t op2)
02114 {
02115   int32_t result;
02116 
02117   __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
02118   return(result);
02119 }
02120 
02121 __STATIC_FORCEINLINE  int32_t __QSUB( int32_t op1,  int32_t op2)
02122 {
02123   int32_t result;
02124 
02125   __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
02126   return(result);
02127 }
02128 
/* Pack-halfword macros, implemented in plain C (the obsolete inline-asm
 * variants that used to sit here inside an "#if 0" block were dead code
 * and have been removed).
 *
 * __PKHBT(ARG1,ARG2,ARG3): bottom halfword of ARG1 in bits [15:0],
 *   bottom halfword of (ARG2 << ARG3) in bits [31:16].
 * __PKHTB(ARG1,ARG2,ARG3): top halfword of ARG1 in bits [31:16],
 *   bottom halfword of (ARG2 >> ARG3) in bits [15:0].
 *
 * NOTE(review): ARG3 should be in 0..31 — a shift count of 32 is undefined
 * behavior in C even though the PKHTB instruction encodes asr #32.
 */
#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )
02153 
02154 __STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
02155 {
02156  int32_t result;
02157 
02158  __ASM ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
02159  return(result);
02160 }
02161 
02162 #endif /* (__ARM_FEATURE_DSP == 1) */
02163 /*@} end of group CMSIS_SIMD_intrinsics */
02164 
02165 
02166 #pragma GCC diagnostic pop
02167 
02168 #endif /* __CMSIS_GCC_H */