Important changes to repositories hosted on mbed.com
Mbed-hosted Mercurial repositories are deprecated and are due to be permanently deleted in July 2026.
To keep a copy of this software, download the repository as a Zip archive or clone it locally using Mercurial.
It is also possible to export all your personal repositories from the account settings page.
cmsis_armclang_ltm.h
00001 /**************************************************************************//** 00002 * @file cmsis_armclang_ltm.h 00003 * @brief CMSIS compiler armclang (Arm Compiler 6) header file 00004 * @version V1.2.1 00005 * @date 30. July 2019 00006 ******************************************************************************/ 00007 /* 00008 * Copyright (c) 2018-2019 Arm Limited. All rights reserved. 00009 * 00010 * SPDX-License-Identifier: Apache-2.0 00011 * 00012 * Licensed under the Apache License, Version 2.0 (the License); you may 00013 * not use this file except in compliance with the License. 00014 * You may obtain a copy of the License at 00015 * 00016 * www.apache.org/licenses/LICENSE-2.0 00017 * 00018 * Unless required by applicable law or agreed to in writing, software 00019 * distributed under the License is distributed on an AS IS BASIS, WITHOUT 00020 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 00021 * See the License for the specific language governing permissions and 00022 * limitations under the License. 
00023 */ 00024 00025 /*lint -esym(9058, IRQn)*/ /* disable MISRA 2012 Rule 2.4 for IRQn */ 00026 00027 #ifndef __CMSIS_ARMCLANG_H 00028 #define __CMSIS_ARMCLANG_H 00029 00030 #pragma clang system_header /* treat file as system include file */ 00031 00032 #ifndef __ARM_COMPAT_H 00033 #include <arm_compat.h> /* Compatibility header for Arm Compiler 5 intrinsics */ 00034 #endif 00035 00036 /* CMSIS compiler specific defines */ 00037 #ifndef __ASM 00038 #define __ASM __asm 00039 #endif 00040 #ifndef __INLINE 00041 #define __INLINE __inline 00042 #endif 00043 #ifndef __STATIC_INLINE 00044 #define __STATIC_INLINE static __inline 00045 #endif 00046 #ifndef __STATIC_FORCEINLINE 00047 #define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline 00048 #endif 00049 #ifndef __NO_RETURN 00050 #define __NO_RETURN __attribute__((__noreturn__)) 00051 #endif 00052 #ifndef __USED 00053 #define __USED __attribute__((used)) 00054 #endif 00055 #ifndef __WEAK 00056 #define __WEAK __attribute__((weak)) 00057 #endif 00058 #ifndef __PACKED 00059 #define __PACKED __attribute__((packed, aligned(1))) 00060 #endif 00061 #ifndef __PACKED_STRUCT 00062 #define __PACKED_STRUCT struct __attribute__((packed, aligned(1))) 00063 #endif 00064 #ifndef __PACKED_UNION 00065 #define __PACKED_UNION union __attribute__((packed, aligned(1))) 00066 #endif 00067 #ifndef __UNALIGNED_UINT32 /* deprecated */ 00068 #pragma clang diagnostic push 00069 #pragma clang diagnostic ignored "-Wpacked" 00070 /*lint -esym(9058, T_UINT32)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32 */ 00071 struct __attribute__((packed)) T_UINT32 { uint32_t v; }; 00072 #pragma clang diagnostic pop 00073 #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v) 00074 #endif 00075 #ifndef __UNALIGNED_UINT16_WRITE 00076 #pragma clang diagnostic push 00077 #pragma clang diagnostic ignored "-Wpacked" 00078 /*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */ 00079 __PACKED_STRUCT 
T_UINT16_WRITE { uint16_t v; }; 00080 #pragma clang diagnostic pop 00081 #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val)) 00082 #endif 00083 #ifndef __UNALIGNED_UINT16_READ 00084 #pragma clang diagnostic push 00085 #pragma clang diagnostic ignored "-Wpacked" 00086 /*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */ 00087 __PACKED_STRUCT T_UINT16_READ { uint16_t v; }; 00088 #pragma clang diagnostic pop 00089 #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v) 00090 #endif 00091 #ifndef __UNALIGNED_UINT32_WRITE 00092 #pragma clang diagnostic push 00093 #pragma clang diagnostic ignored "-Wpacked" 00094 /*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */ 00095 __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; }; 00096 #pragma clang diagnostic pop 00097 #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val)) 00098 #endif 00099 #ifndef __UNALIGNED_UINT32_READ 00100 #pragma clang diagnostic push 00101 #pragma clang diagnostic ignored "-Wpacked" 00102 /*lint -esym(9058, T_UINT32_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_READ */ 00103 __PACKED_STRUCT T_UINT32_READ { uint32_t v; }; 00104 #pragma clang diagnostic pop 00105 #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v) 00106 #endif 00107 #ifndef __ALIGNED 00108 #define __ALIGNED(x) __attribute__((aligned(x))) 00109 #endif 00110 #ifndef __RESTRICT 00111 #define __RESTRICT __restrict 00112 #endif 00113 #ifndef __COMPILER_BARRIER 00114 #define __COMPILER_BARRIER() __ASM volatile("":::"memory") 00115 #endif 00116 00117 /* ######################### Startup and Lowlevel Init ######################## */ 00118 00119 #ifndef __PROGRAM_START 00120 #define __PROGRAM_START __main 00121 #endif 00122 00123 #ifndef __INITIAL_SP 00124 #define __INITIAL_SP 
Image$$ARM_LIB_STACK$$ZI$$Limit 00125 #endif 00126 00127 #ifndef __STACK_LIMIT 00128 #define __STACK_LIMIT Image$$ARM_LIB_STACK$$ZI$$Base 00129 #endif 00130 00131 #ifndef __VECTOR_TABLE 00132 #define __VECTOR_TABLE __Vectors 00133 #endif 00134 00135 #ifndef __VECTOR_TABLE_ATTRIBUTE 00136 #define __VECTOR_TABLE_ATTRIBUTE __attribute((used, section("RESET"))) 00137 #endif 00138 00139 00140 /* ########################### Core Function Access ########################### */ 00141 /** \ingroup CMSIS_Core_FunctionInterface 00142 \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions 00143 @{ 00144 */ 00145 00146 /** 00147 \brief Enable IRQ Interrupts 00148 \details Enables IRQ interrupts by clearing the I-bit in the CPSR. 00149 Can only be executed in Privileged modes. 00150 */ 00151 /* intrinsic void __enable_irq(); see arm_compat.h */ 00152 00153 00154 /** 00155 \brief Disable IRQ Interrupts 00156 \details Disables IRQ interrupts by setting the I-bit in the CPSR. 00157 Can only be executed in Privileged modes. 00158 */ 00159 /* intrinsic void __disable_irq(); see arm_compat.h */ 00160 00161 00162 /** 00163 \brief Get Control Register 00164 \details Returns the content of the Control Register. 00165 \return Control Register value 00166 */ 00167 __STATIC_FORCEINLINE uint32_t __get_CONTROL(void) 00168 { 00169 uint32_t result; 00170 00171 __ASM volatile ("MRS %0, control" : "=r" (result) ); 00172 return(result); 00173 } 00174 00175 00176 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 00177 /** 00178 \brief Get Control Register (non-secure) 00179 \details Returns the content of the non-secure Control Register when in secure mode. 
00180 \return non-secure Control Register value 00181 */ 00182 __STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void) 00183 { 00184 uint32_t result; 00185 00186 __ASM volatile ("MRS %0, control_ns" : "=r" (result) ); 00187 return(result); 00188 } 00189 #endif 00190 00191 00192 /** 00193 \brief Set Control Register 00194 \details Writes the given value to the Control Register. 00195 \param [in] control Control Register value to set 00196 */ 00197 __STATIC_FORCEINLINE void __set_CONTROL(uint32_t control) 00198 { 00199 __ASM volatile ("MSR control, %0" : : "r" (control) : "memory"); 00200 } 00201 00202 00203 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 00204 /** 00205 \brief Set Control Register (non-secure) 00206 \details Writes the given value to the non-secure Control Register when in secure state. 00207 \param [in] control Control Register value to set 00208 */ 00209 __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) 00210 { 00211 __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory"); 00212 } 00213 #endif 00214 00215 00216 /** 00217 \brief Get IPSR Register 00218 \details Returns the content of the IPSR Register. 00219 \return IPSR Register value 00220 */ 00221 __STATIC_FORCEINLINE uint32_t __get_IPSR(void) 00222 { 00223 uint32_t result; 00224 00225 __ASM volatile ("MRS %0, ipsr" : "=r" (result) ); 00226 return(result); 00227 } 00228 00229 00230 /** 00231 \brief Get APSR Register 00232 \details Returns the content of the APSR Register. 00233 \return APSR Register value 00234 */ 00235 __STATIC_FORCEINLINE uint32_t __get_APSR(void) 00236 { 00237 uint32_t result; 00238 00239 __ASM volatile ("MRS %0, apsr" : "=r" (result) ); 00240 return(result); 00241 } 00242 00243 00244 /** 00245 \brief Get xPSR Register 00246 \details Returns the content of the xPSR Register. 
00247 \return xPSR Register value 00248 */ 00249 __STATIC_FORCEINLINE uint32_t __get_xPSR(void) 00250 { 00251 uint32_t result; 00252 00253 __ASM volatile ("MRS %0, xpsr" : "=r" (result) ); 00254 return(result); 00255 } 00256 00257 00258 /** 00259 \brief Get Process Stack Pointer 00260 \details Returns the current value of the Process Stack Pointer (PSP). 00261 \return PSP Register value 00262 */ 00263 __STATIC_FORCEINLINE uint32_t __get_PSP(void) 00264 { 00265 uint32_t result; 00266 00267 __ASM volatile ("MRS %0, psp" : "=r" (result) ); 00268 return(result); 00269 } 00270 00271 00272 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 00273 /** 00274 \brief Get Process Stack Pointer (non-secure) 00275 \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state. 00276 \return PSP Register value 00277 */ 00278 __STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void) 00279 { 00280 uint32_t result; 00281 00282 __ASM volatile ("MRS %0, psp_ns" : "=r" (result) ); 00283 return(result); 00284 } 00285 #endif 00286 00287 00288 /** 00289 \brief Set Process Stack Pointer 00290 \details Assigns the given value to the Process Stack Pointer (PSP). 00291 \param [in] topOfProcStack Process Stack Pointer value to set 00292 */ 00293 __STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack) 00294 { 00295 __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : ); 00296 } 00297 00298 00299 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 00300 /** 00301 \brief Set Process Stack Pointer (non-secure) 00302 \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state. 
00303 \param [in] topOfProcStack Process Stack Pointer value to set 00304 */ 00305 __STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack) 00306 { 00307 __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : ); 00308 } 00309 #endif 00310 00311 00312 /** 00313 \brief Get Main Stack Pointer 00314 \details Returns the current value of the Main Stack Pointer (MSP). 00315 \return MSP Register value 00316 */ 00317 __STATIC_FORCEINLINE uint32_t __get_MSP(void) 00318 { 00319 uint32_t result; 00320 00321 __ASM volatile ("MRS %0, msp" : "=r" (result) ); 00322 return(result); 00323 } 00324 00325 00326 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 00327 /** 00328 \brief Get Main Stack Pointer (non-secure) 00329 \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state. 00330 \return MSP Register value 00331 */ 00332 __STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void) 00333 { 00334 uint32_t result; 00335 00336 __ASM volatile ("MRS %0, msp_ns" : "=r" (result) ); 00337 return(result); 00338 } 00339 #endif 00340 00341 00342 /** 00343 \brief Set Main Stack Pointer 00344 \details Assigns the given value to the Main Stack Pointer (MSP). 00345 \param [in] topOfMainStack Main Stack Pointer value to set 00346 */ 00347 __STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack) 00348 { 00349 __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : ); 00350 } 00351 00352 00353 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 00354 /** 00355 \brief Set Main Stack Pointer (non-secure) 00356 \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state. 
00357 \param [in] topOfMainStack Main Stack Pointer value to set 00358 */ 00359 __STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack) 00360 { 00361 __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : ); 00362 } 00363 #endif 00364 00365 00366 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 00367 /** 00368 \brief Get Stack Pointer (non-secure) 00369 \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state. 00370 \return SP Register value 00371 */ 00372 __STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void) 00373 { 00374 uint32_t result; 00375 00376 __ASM volatile ("MRS %0, sp_ns" : "=r" (result) ); 00377 return(result); 00378 } 00379 00380 00381 /** 00382 \brief Set Stack Pointer (non-secure) 00383 \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state. 00384 \param [in] topOfStack Stack Pointer value to set 00385 */ 00386 __STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack) 00387 { 00388 __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : ); 00389 } 00390 #endif 00391 00392 00393 /** 00394 \brief Get Priority Mask 00395 \details Returns the current state of the priority mask bit from the Priority Mask Register. 00396 \return Priority Mask value 00397 */ 00398 __STATIC_FORCEINLINE uint32_t __get_PRIMASK(void) 00399 { 00400 uint32_t result; 00401 00402 __ASM volatile ("MRS %0, primask" : "=r" (result) ); 00403 return(result); 00404 } 00405 00406 00407 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 00408 /** 00409 \brief Get Priority Mask (non-secure) 00410 \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state. 
00411 \return Priority Mask value 00412 */ 00413 __STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void) 00414 { 00415 uint32_t result; 00416 00417 __ASM volatile ("MRS %0, primask_ns" : "=r" (result) ); 00418 return(result); 00419 } 00420 #endif 00421 00422 00423 /** 00424 \brief Set Priority Mask 00425 \details Assigns the given value to the Priority Mask Register. 00426 \param [in] priMask Priority Mask 00427 */ 00428 __STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask) 00429 { 00430 __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory"); 00431 } 00432 00433 00434 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 00435 /** 00436 \brief Set Priority Mask (non-secure) 00437 \details Assigns the given value to the non-secure Priority Mask Register when in secure state. 00438 \param [in] priMask Priority Mask 00439 */ 00440 __STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask) 00441 { 00442 __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory"); 00443 } 00444 #endif 00445 00446 00447 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 00448 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 00449 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) 00450 /** 00451 \brief Enable FIQ 00452 \details Enables FIQ interrupts by clearing the F-bit in the CPSR. 00453 Can only be executed in Privileged modes. 00454 */ 00455 #define __enable_fault_irq __enable_fiq /* see arm_compat.h */ 00456 00457 00458 /** 00459 \brief Disable FIQ 00460 \details Disables FIQ interrupts by setting the F-bit in the CPSR. 00461 Can only be executed in Privileged modes. 00462 */ 00463 #define __disable_fault_irq __disable_fiq /* see arm_compat.h */ 00464 00465 00466 /** 00467 \brief Get Base Priority 00468 \details Returns the current value of the Base Priority register. 
00469 \return Base Priority register value 00470 */ 00471 __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void) 00472 { 00473 uint32_t result; 00474 00475 __ASM volatile ("MRS %0, basepri" : "=r" (result) ); 00476 return(result); 00477 } 00478 00479 00480 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 00481 /** 00482 \brief Get Base Priority (non-secure) 00483 \details Returns the current value of the non-secure Base Priority register when in secure state. 00484 \return Base Priority register value 00485 */ 00486 __STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void) 00487 { 00488 uint32_t result; 00489 00490 __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) ); 00491 return(result); 00492 } 00493 #endif 00494 00495 00496 /** 00497 \brief Set Base Priority 00498 \details Assigns the given value to the Base Priority register. 00499 \param [in] basePri Base Priority value to set 00500 */ 00501 __STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri) 00502 { 00503 __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory"); 00504 } 00505 00506 00507 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 00508 /** 00509 \brief Set Base Priority (non-secure) 00510 \details Assigns the given value to the non-secure Base Priority register when in secure state. 00511 \param [in] basePri Base Priority value to set 00512 */ 00513 __STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri) 00514 { 00515 __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory"); 00516 } 00517 #endif 00518 00519 00520 /** 00521 \brief Set Base Priority with condition 00522 \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled, 00523 or the new value increases the BASEPRI priority level. 
00524 \param [in] basePri Base Priority value to set 00525 */ 00526 __STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri) 00527 { 00528 __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory"); 00529 } 00530 00531 00532 /** 00533 \brief Get Fault Mask 00534 \details Returns the current value of the Fault Mask register. 00535 \return Fault Mask register value 00536 */ 00537 __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void) 00538 { 00539 uint32_t result; 00540 00541 __ASM volatile ("MRS %0, faultmask" : "=r" (result) ); 00542 return(result); 00543 } 00544 00545 00546 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 00547 /** 00548 \brief Get Fault Mask (non-secure) 00549 \details Returns the current value of the non-secure Fault Mask register when in secure state. 00550 \return Fault Mask register value 00551 */ 00552 __STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void) 00553 { 00554 uint32_t result; 00555 00556 __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) ); 00557 return(result); 00558 } 00559 #endif 00560 00561 00562 /** 00563 \brief Set Fault Mask 00564 \details Assigns the given value to the Fault Mask register. 00565 \param [in] faultMask Fault Mask value to set 00566 */ 00567 __STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask) 00568 { 00569 __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory"); 00570 } 00571 00572 00573 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 00574 /** 00575 \brief Set Fault Mask (non-secure) 00576 \details Assigns the given value to the non-secure Fault Mask register when in secure state. 
00577 \param [in] faultMask Fault Mask value to set 00578 */ 00579 __STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask) 00580 { 00581 __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory"); 00582 } 00583 #endif 00584 00585 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 00586 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 00587 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */ 00588 00589 00590 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 00591 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) 00592 00593 /** 00594 \brief Get Process Stack Pointer Limit 00595 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure 00596 Stack Pointer Limit register hence zero is returned always in non-secure 00597 mode. 00598 00599 \details Returns the current value of the Process Stack Pointer Limit (PSPLIM). 00600 \return PSPLIM Register value 00601 */ 00602 __STATIC_FORCEINLINE uint32_t __get_PSPLIM(void) 00603 { 00604 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 00605 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 00606 // without main extensions, the non-secure PSPLIM is RAZ/WI 00607 return 0U; 00608 #else 00609 uint32_t result; 00610 __ASM volatile ("MRS %0, psplim" : "=r" (result) ); 00611 return result; 00612 #endif 00613 } 00614 00615 #if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)) 00616 /** 00617 \brief Get Process Stack Pointer Limit (non-secure) 00618 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure 00619 Stack Pointer Limit register hence zero is returned always in non-secure 00620 mode. 00621 00622 \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. 
00623 \return PSPLIM Register value 00624 */ 00625 __STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void) 00626 { 00627 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))) 00628 // without main extensions, the non-secure PSPLIM is RAZ/WI 00629 return 0U; 00630 #else 00631 uint32_t result; 00632 __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) ); 00633 return result; 00634 #endif 00635 } 00636 #endif 00637 00638 00639 /** 00640 \brief Set Process Stack Pointer Limit 00641 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure 00642 Stack Pointer Limit register hence the write is silently ignored in non-secure 00643 mode. 00644 00645 \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM). 00646 \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set 00647 */ 00648 __STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit) 00649 { 00650 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 00651 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 00652 // without main extensions, the non-secure PSPLIM is RAZ/WI 00653 (void)ProcStackPtrLimit; 00654 #else 00655 __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit)); 00656 #endif 00657 } 00658 00659 00660 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 00661 /** 00662 \brief Set Process Stack Pointer (non-secure) 00663 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure 00664 Stack Pointer Limit register hence the write is silently ignored in non-secure 00665 mode. 00666 00667 \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. 
00668 \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set 00669 */ 00670 __STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit) 00671 { 00672 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))) 00673 // without main extensions, the non-secure PSPLIM is RAZ/WI 00674 (void)ProcStackPtrLimit; 00675 #else 00676 __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit)); 00677 #endif 00678 } 00679 #endif 00680 00681 00682 /** 00683 \brief Get Main Stack Pointer Limit 00684 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure 00685 Stack Pointer Limit register hence zero is returned always. 00686 00687 \details Returns the current value of the Main Stack Pointer Limit (MSPLIM). 00688 \return MSPLIM Register value 00689 */ 00690 __STATIC_FORCEINLINE uint32_t __get_MSPLIM(void) 00691 { 00692 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 00693 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 00694 // without main extensions, the non-secure MSPLIM is RAZ/WI 00695 return 0U; 00696 #else 00697 uint32_t result; 00698 __ASM volatile ("MRS %0, msplim" : "=r" (result) ); 00699 return result; 00700 #endif 00701 } 00702 00703 00704 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 00705 /** 00706 \brief Get Main Stack Pointer Limit (non-secure) 00707 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure 00708 Stack Pointer Limit register hence zero is returned always. 00709 00710 \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state. 
00711 \return MSPLIM Register value 00712 */ 00713 __STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void) 00714 { 00715 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))) 00716 // without main extensions, the non-secure MSPLIM is RAZ/WI 00717 return 0U; 00718 #else 00719 uint32_t result; 00720 __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) ); 00721 return result; 00722 #endif 00723 } 00724 #endif 00725 00726 00727 /** 00728 \brief Set Main Stack Pointer Limit 00729 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure 00730 Stack Pointer Limit register hence the write is silently ignored. 00731 00732 \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM). 00733 \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set 00734 */ 00735 __STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit) 00736 { 00737 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 00738 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 00739 // without main extensions, the non-secure MSPLIM is RAZ/WI 00740 (void)MainStackPtrLimit; 00741 #else 00742 __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit)); 00743 #endif 00744 } 00745 00746 00747 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 00748 /** 00749 \brief Set Main Stack Pointer Limit (non-secure) 00750 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure 00751 Stack Pointer Limit register hence the write is silently ignored. 00752 00753 \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state. 
00754 \param [in] MainStackPtrLimit Main Stack Pointer value to set 00755 */ 00756 __STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit) 00757 { 00758 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))) 00759 // without main extensions, the non-secure MSPLIM is RAZ/WI 00760 (void)MainStackPtrLimit; 00761 #else 00762 __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit)); 00763 #endif 00764 } 00765 #endif 00766 00767 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 00768 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */ 00769 00770 /** 00771 \brief Get FPSCR 00772 \details Returns the current value of the Floating Point Status/Control register. 00773 \return Floating Point Status/Control register value 00774 */ 00775 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ 00776 (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) 00777 #define __get_FPSCR (uint32_t)__builtin_arm_get_fpscr 00778 #else 00779 #define __get_FPSCR() ((uint32_t)0U) 00780 #endif 00781 00782 /** 00783 \brief Set FPSCR 00784 \details Assigns the given value to the Floating Point Status/Control register. 00785 \param [in] fpscr Floating Point Status/Control value to set 00786 */ 00787 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ 00788 (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) 00789 #define __set_FPSCR __builtin_arm_set_fpscr 00790 #else 00791 #define __set_FPSCR(x) ((void)(x)) 00792 #endif 00793 00794 00795 /*@} end of CMSIS_Core_RegAccFunctions */ 00796 00797 00798 /* ########################## Core Instruction Access ######################### */ 00799 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface 00800 Access to dedicated instructions 00801 @{ 00802 */ 00803 00804 /* Define macros for porting to both thumb1 and thumb2. 
00805 * For thumb1, use low register (r0-r7), specified by constraint "l" 00806 * Otherwise, use general registers, specified by constraint "r" */ 00807 #if defined (__thumb__) && !defined (__thumb2__) 00808 #define __CMSIS_GCC_OUT_REG(r) "=l" (r) 00809 #define __CMSIS_GCC_USE_REG(r) "l" (r) 00810 #else 00811 #define __CMSIS_GCC_OUT_REG(r) "=r" (r) 00812 #define __CMSIS_GCC_USE_REG(r) "r" (r) 00813 #endif 00814 00815 /** 00816 \brief No Operation 00817 \details No Operation does nothing. This instruction can be used for code alignment purposes. 00818 */ 00819 #define __NOP __builtin_arm_nop 00820 00821 /** 00822 \brief Wait For Interrupt 00823 \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs. 00824 */ 00825 #define __WFI __builtin_arm_wfi 00826 00827 00828 /** 00829 \brief Wait For Event 00830 \details Wait For Event is a hint instruction that permits the processor to enter 00831 a low-power state until one of a number of events occurs. 00832 */ 00833 #define __WFE __builtin_arm_wfe 00834 00835 00836 /** 00837 \brief Send Event 00838 \details Send Event is a hint instruction. It causes an event to be signaled to the CPU. 00839 */ 00840 #define __SEV __builtin_arm_sev 00841 00842 00843 /** 00844 \brief Instruction Synchronization Barrier 00845 \details Instruction Synchronization Barrier flushes the pipeline in the processor, 00846 so that all instructions following the ISB are fetched from cache or memory, 00847 after the instruction has been completed. 00848 */ 00849 #define __ISB() __builtin_arm_isb(0xF) 00850 00851 /** 00852 \brief Data Synchronization Barrier 00853 \details Acts as a special kind of Data Memory Barrier. 00854 It completes when all explicit memory accesses before this instruction complete. 
00855 */ 00856 #define __DSB() __builtin_arm_dsb(0xF) 00857 00858 00859 /** 00860 \brief Data Memory Barrier 00861 \details Ensures the apparent order of the explicit memory operations before 00862 and after the instruction, without ensuring their completion. 00863 */ 00864 #define __DMB() __builtin_arm_dmb(0xF) 00865 00866 00867 /** 00868 \brief Reverse byte order (32 bit) 00869 \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412. 00870 \param [in] value Value to reverse 00871 \return Reversed value 00872 */ 00873 #define __REV(value) __builtin_bswap32(value) 00874 00875 00876 /** 00877 \brief Reverse byte order (16 bit) 00878 \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856. 00879 \param [in] value Value to reverse 00880 \return Reversed value 00881 */ 00882 #define __REV16(value) __ROR(__REV(value), 16) 00883 00884 00885 /** 00886 \brief Reverse byte order (16 bit) 00887 \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000. 00888 \param [in] value Value to reverse 00889 \return Reversed value 00890 */ 00891 #define __REVSH(value) (int16_t)__builtin_bswap16(value) 00892 00893 00894 /** 00895 \brief Rotate Right in unsigned value (32 bit) 00896 \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits. 00897 \param [in] op1 Value to rotate 00898 \param [in] op2 Number of Bits to rotate 00899 \return Rotated value 00900 */ 00901 __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2) 00902 { 00903 op2 %= 32U; 00904 if (op2 == 0U) 00905 { 00906 return op1; 00907 } 00908 return (op1 >> op2) | (op1 << (32U - op2)); 00909 } 00910 00911 00912 /** 00913 \brief Breakpoint 00914 \details Causes the processor to enter Debug state. 
00915 Debug tools can use this to investigate system state when the instruction at a particular address is reached. 00916 \param [in] value is ignored by the processor. 00917 If required, a debugger can use it to store additional information about the breakpoint. 00918 */ 00919 #define __BKPT(value) __ASM volatile ("bkpt "#value) 00920 00921 00922 /** 00923 \brief Reverse bit order of value 00924 \details Reverses the bit order of the given value. 00925 \param [in] value Value to reverse 00926 \return Reversed value 00927 */ 00928 #define __RBIT __builtin_arm_rbit 00929 00930 /** 00931 \brief Count leading zeros 00932 \details Counts the number of leading zeros of a data value. 00933 \param [in] value Value to count the leading zeros 00934 \return number of leading zeros in value 00935 */ 00936 __STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value) 00937 { 00938 /* Even though __builtin_clz produces a CLZ instruction on ARM, formally 00939 __builtin_clz(0) is undefined behaviour, so handle this case specially. 00940 This guarantees ARM-compatible results if happening to compile on a non-ARM 00941 target, and ensures the compiler doesn't decide to activate any 00942 optimisations using the logic "value was passed to __builtin_clz, so it 00943 is non-zero". 00944 ARM Compiler 6.10 and possibly earlier will optimise this test away, leaving a 00945 single CLZ instruction. 00946 */ 00947 if (value == 0U) 00948 { 00949 return 32U; 00950 } 00951 return __builtin_clz(value); 00952 } 00953 00954 00955 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 00956 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 00957 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 00958 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) 00959 /** 00960 \brief LDR Exclusive (8 bit) 00961 \details Executes a exclusive LDR instruction for 8 bit value. 
00962 \param [in] ptr Pointer to data 00963 \return value of type uint8_t at (*ptr) 00964 */ 00965 #define __LDREXB (uint8_t)__builtin_arm_ldrex 00966 00967 00968 /** 00969 \brief LDR Exclusive (16 bit) 00970 \details Executes a exclusive LDR instruction for 16 bit values. 00971 \param [in] ptr Pointer to data 00972 \return value of type uint16_t at (*ptr) 00973 */ 00974 #define __LDREXH (uint16_t)__builtin_arm_ldrex 00975 00976 00977 /** 00978 \brief LDR Exclusive (32 bit) 00979 \details Executes a exclusive LDR instruction for 32 bit values. 00980 \param [in] ptr Pointer to data 00981 \return value of type uint32_t at (*ptr) 00982 */ 00983 #define __LDREXW (uint32_t)__builtin_arm_ldrex 00984 00985 00986 /** 00987 \brief STR Exclusive (8 bit) 00988 \details Executes a exclusive STR instruction for 8 bit values. 00989 \param [in] value Value to store 00990 \param [in] ptr Pointer to location 00991 \return 0 Function succeeded 00992 \return 1 Function failed 00993 */ 00994 #define __STREXB (uint32_t)__builtin_arm_strex 00995 00996 00997 /** 00998 \brief STR Exclusive (16 bit) 00999 \details Executes a exclusive STR instruction for 16 bit values. 01000 \param [in] value Value to store 01001 \param [in] ptr Pointer to location 01002 \return 0 Function succeeded 01003 \return 1 Function failed 01004 */ 01005 #define __STREXH (uint32_t)__builtin_arm_strex 01006 01007 01008 /** 01009 \brief STR Exclusive (32 bit) 01010 \details Executes a exclusive STR instruction for 32 bit values. 01011 \param [in] value Value to store 01012 \param [in] ptr Pointer to location 01013 \return 0 Function succeeded 01014 \return 1 Function failed 01015 */ 01016 #define __STREXW (uint32_t)__builtin_arm_strex 01017 01018 01019 /** 01020 \brief Remove the exclusive lock 01021 \details Removes the exclusive lock which is created by LDREX. 
01022 */ 01023 #define __CLREX __builtin_arm_clrex 01024 01025 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 01026 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 01027 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 01028 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */ 01029 01030 01031 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 01032 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 01033 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) 01034 01035 /** 01036 \brief Signed Saturate 01037 \details Saturates a signed value. 01038 \param [in] value Value to be saturated 01039 \param [in] sat Bit position to saturate to (1..32) 01040 \return Saturated value 01041 */ 01042 #define __SSAT __builtin_arm_ssat 01043 01044 01045 /** 01046 \brief Unsigned Saturate 01047 \details Saturates an unsigned value. 01048 \param [in] value Value to be saturated 01049 \param [in] sat Bit position to saturate to (0..31) 01050 \return Saturated value 01051 */ 01052 #define __USAT __builtin_arm_usat 01053 01054 01055 /** 01056 \brief Rotate Right with Extend (32 bit) 01057 \details Moves each bit of a bitstring right by one bit. 01058 The carry input is shifted in at the left end of the bitstring. 01059 \param [in] value Value to rotate 01060 \return Rotated value 01061 */ 01062 __STATIC_FORCEINLINE uint32_t __RRX(uint32_t value) 01063 { 01064 uint32_t result; 01065 01066 __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) ); 01067 return(result); 01068 } 01069 01070 01071 /** 01072 \brief LDRT Unprivileged (8 bit) 01073 \details Executes a Unprivileged LDRT instruction for 8 bit value. 
01074 \param [in] ptr Pointer to data 01075 \return value of type uint8_t at (*ptr) 01076 */ 01077 __STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr) 01078 { 01079 uint32_t result; 01080 01081 __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) ); 01082 return ((uint8_t) result); /* Add explicit type cast here */ 01083 } 01084 01085 01086 /** 01087 \brief LDRT Unprivileged (16 bit) 01088 \details Executes a Unprivileged LDRT instruction for 16 bit values. 01089 \param [in] ptr Pointer to data 01090 \return value of type uint16_t at (*ptr) 01091 */ 01092 __STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr) 01093 { 01094 uint32_t result; 01095 01096 __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) ); 01097 return ((uint16_t) result); /* Add explicit type cast here */ 01098 } 01099 01100 01101 /** 01102 \brief LDRT Unprivileged (32 bit) 01103 \details Executes a Unprivileged LDRT instruction for 32 bit values. 01104 \param [in] ptr Pointer to data 01105 \return value of type uint32_t at (*ptr) 01106 */ 01107 __STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr) 01108 { 01109 uint32_t result; 01110 01111 __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) ); 01112 return(result); 01113 } 01114 01115 01116 /** 01117 \brief STRT Unprivileged (8 bit) 01118 \details Executes a Unprivileged STRT instruction for 8 bit values. 01119 \param [in] value Value to store 01120 \param [in] ptr Pointer to location 01121 */ 01122 __STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr) 01123 { 01124 __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) ); 01125 } 01126 01127 01128 /** 01129 \brief STRT Unprivileged (16 bit) 01130 \details Executes a Unprivileged STRT instruction for 16 bit values. 
01131 \param [in] value Value to store 01132 \param [in] ptr Pointer to location 01133 */ 01134 __STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr) 01135 { 01136 __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) ); 01137 } 01138 01139 01140 /** 01141 \brief STRT Unprivileged (32 bit) 01142 \details Executes a Unprivileged STRT instruction for 32 bit values. 01143 \param [in] value Value to store 01144 \param [in] ptr Pointer to location 01145 */ 01146 __STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr) 01147 { 01148 __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) ); 01149 } 01150 01151 #else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 01152 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 01153 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */ 01154 01155 /** 01156 \brief Signed Saturate 01157 \details Saturates a signed value. 01158 \param [in] value Value to be saturated 01159 \param [in] sat Bit position to saturate to (1..32) 01160 \return Saturated value 01161 */ 01162 __STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat) 01163 { 01164 if ((sat >= 1U) && (sat <= 32U)) 01165 { 01166 const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U); 01167 const int32_t min = -1 - max ; 01168 if (val > max) 01169 { 01170 return max; 01171 } 01172 else if (val < min) 01173 { 01174 return min; 01175 } 01176 } 01177 return val; 01178 } 01179 01180 /** 01181 \brief Unsigned Saturate 01182 \details Saturates an unsigned value. 
01183 \param [in] value Value to be saturated 01184 \param [in] sat Bit position to saturate to (0..31) 01185 \return Saturated value 01186 */ 01187 __STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat) 01188 { 01189 if (sat <= 31U) 01190 { 01191 const uint32_t max = ((1U << sat) - 1U); 01192 if (val > (int32_t)max) 01193 { 01194 return max; 01195 } 01196 else if (val < 0) 01197 { 01198 return 0U; 01199 } 01200 } 01201 return (uint32_t)val; 01202 } 01203 01204 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 01205 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 01206 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */ 01207 01208 01209 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 01210 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) 01211 /** 01212 \brief Load-Acquire (8 bit) 01213 \details Executes a LDAB instruction for 8 bit value. 01214 \param [in] ptr Pointer to data 01215 \return value of type uint8_t at (*ptr) 01216 */ 01217 __STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr) 01218 { 01219 uint32_t result; 01220 01221 __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" ); 01222 return ((uint8_t) result); 01223 } 01224 01225 01226 /** 01227 \brief Load-Acquire (16 bit) 01228 \details Executes a LDAH instruction for 16 bit values. 01229 \param [in] ptr Pointer to data 01230 \return value of type uint16_t at (*ptr) 01231 */ 01232 __STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr) 01233 { 01234 uint32_t result; 01235 01236 __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" ); 01237 return ((uint16_t) result); 01238 } 01239 01240 01241 /** 01242 \brief Load-Acquire (32 bit) 01243 \details Executes a LDA instruction for 32 bit values. 
01244 \param [in] ptr Pointer to data 01245 \return value of type uint32_t at (*ptr) 01246 */ 01247 __STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr) 01248 { 01249 uint32_t result; 01250 01251 __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" ); 01252 return(result); 01253 } 01254 01255 01256 /** 01257 \brief Store-Release (8 bit) 01258 \details Executes a STLB instruction for 8 bit values. 01259 \param [in] value Value to store 01260 \param [in] ptr Pointer to location 01261 */ 01262 __STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr) 01263 { 01264 __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" ); 01265 } 01266 01267 01268 /** 01269 \brief Store-Release (16 bit) 01270 \details Executes a STLH instruction for 16 bit values. 01271 \param [in] value Value to store 01272 \param [in] ptr Pointer to location 01273 */ 01274 __STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr) 01275 { 01276 __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" ); 01277 } 01278 01279 01280 /** 01281 \brief Store-Release (32 bit) 01282 \details Executes a STL instruction for 32 bit values. 01283 \param [in] value Value to store 01284 \param [in] ptr Pointer to location 01285 */ 01286 __STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr) 01287 { 01288 __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" ); 01289 } 01290 01291 01292 /** 01293 \brief Load-Acquire Exclusive (8 bit) 01294 \details Executes a LDAB exclusive instruction for 8 bit value. 01295 \param [in] ptr Pointer to data 01296 \return value of type uint8_t at (*ptr) 01297 */ 01298 #define __LDAEXB (uint8_t)__builtin_arm_ldaex 01299 01300 01301 /** 01302 \brief Load-Acquire Exclusive (16 bit) 01303 \details Executes a LDAH exclusive instruction for 16 bit values. 
01304 \param [in] ptr Pointer to data 01305 \return value of type uint16_t at (*ptr) 01306 */ 01307 #define __LDAEXH (uint16_t)__builtin_arm_ldaex 01308 01309 01310 /** 01311 \brief Load-Acquire Exclusive (32 bit) 01312 \details Executes a LDA exclusive instruction for 32 bit values. 01313 \param [in] ptr Pointer to data 01314 \return value of type uint32_t at (*ptr) 01315 */ 01316 #define __LDAEX (uint32_t)__builtin_arm_ldaex 01317 01318 01319 /** 01320 \brief Store-Release Exclusive (8 bit) 01321 \details Executes a STLB exclusive instruction for 8 bit values. 01322 \param [in] value Value to store 01323 \param [in] ptr Pointer to location 01324 \return 0 Function succeeded 01325 \return 1 Function failed 01326 */ 01327 #define __STLEXB (uint32_t)__builtin_arm_stlex 01328 01329 01330 /** 01331 \brief Store-Release Exclusive (16 bit) 01332 \details Executes a STLH exclusive instruction for 16 bit values. 01333 \param [in] value Value to store 01334 \param [in] ptr Pointer to location 01335 \return 0 Function succeeded 01336 \return 1 Function failed 01337 */ 01338 #define __STLEXH (uint32_t)__builtin_arm_stlex 01339 01340 01341 /** 01342 \brief Store-Release Exclusive (32 bit) 01343 \details Executes a STL exclusive instruction for 32 bit values. 
01344 \param [in] value Value to store 01345 \param [in] ptr Pointer to location 01346 \return 0 Function succeeded 01347 \return 1 Function failed 01348 */ 01349 #define __STLEX (uint32_t)__builtin_arm_stlex 01350 01351 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 01352 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */ 01353 01354 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */ 01355 01356 01357 /* ################### Compiler specific Intrinsics ########################### */ 01358 /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics 01359 Access to dedicated SIMD instructions 01360 @{ 01361 */ 01362 01363 #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1)) 01364 01365 __STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2) 01366 { 01367 uint32_t result; 01368 01369 __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01370 return(result); 01371 } 01372 01373 __STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2) 01374 { 01375 uint32_t result; 01376 01377 __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01378 return(result); 01379 } 01380 01381 __STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2) 01382 { 01383 uint32_t result; 01384 01385 __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01386 return(result); 01387 } 01388 01389 __STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2) 01390 { 01391 uint32_t result; 01392 01393 __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01394 return(result); 01395 } 01396 01397 __STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2) 01398 { 01399 uint32_t result; 01400 01401 __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01402 return(result); 01403 } 01404 01405 __STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2) 01406 { 01407 uint32_t 
result; 01408 01409 __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01410 return(result); 01411 } 01412 01413 01414 __STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2) 01415 { 01416 uint32_t result; 01417 01418 __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01419 return(result); 01420 } 01421 01422 __STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2) 01423 { 01424 uint32_t result; 01425 01426 __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01427 return(result); 01428 } 01429 01430 __STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2) 01431 { 01432 uint32_t result; 01433 01434 __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01435 return(result); 01436 } 01437 01438 __STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2) 01439 { 01440 uint32_t result; 01441 01442 __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01443 return(result); 01444 } 01445 01446 __STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2) 01447 { 01448 uint32_t result; 01449 01450 __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01451 return(result); 01452 } 01453 01454 __STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2) 01455 { 01456 uint32_t result; 01457 01458 __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01459 return(result); 01460 } 01461 01462 01463 __STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2) 01464 { 01465 uint32_t result; 01466 01467 __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01468 return(result); 01469 } 01470 01471 __STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2) 01472 { 01473 uint32_t result; 01474 01475 __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01476 return(result); 01477 } 01478 01479 
__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2) 01480 { 01481 uint32_t result; 01482 01483 __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01484 return(result); 01485 } 01486 01487 __STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2) 01488 { 01489 uint32_t result; 01490 01491 __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01492 return(result); 01493 } 01494 01495 __STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2) 01496 { 01497 uint32_t result; 01498 01499 __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01500 return(result); 01501 } 01502 01503 __STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2) 01504 { 01505 uint32_t result; 01506 01507 __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01508 return(result); 01509 } 01510 01511 __STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2) 01512 { 01513 uint32_t result; 01514 01515 __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01516 return(result); 01517 } 01518 01519 __STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2) 01520 { 01521 uint32_t result; 01522 01523 __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01524 return(result); 01525 } 01526 01527 __STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2) 01528 { 01529 uint32_t result; 01530 01531 __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01532 return(result); 01533 } 01534 01535 __STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2) 01536 { 01537 uint32_t result; 01538 01539 __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01540 return(result); 01541 } 01542 01543 __STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2) 01544 { 01545 uint32_t result; 01546 01547 __ASM volatile ("uqsub16 %0, %1, %2" : 
"=r" (result) : "r" (op1), "r" (op2) ); 01548 return(result); 01549 } 01550 01551 __STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2) 01552 { 01553 uint32_t result; 01554 01555 __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01556 return(result); 01557 } 01558 01559 __STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2) 01560 { 01561 uint32_t result; 01562 01563 __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01564 return(result); 01565 } 01566 01567 __STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2) 01568 { 01569 uint32_t result; 01570 01571 __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01572 return(result); 01573 } 01574 01575 __STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2) 01576 { 01577 uint32_t result; 01578 01579 __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01580 return(result); 01581 } 01582 01583 __STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2) 01584 { 01585 uint32_t result; 01586 01587 __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01588 return(result); 01589 } 01590 01591 __STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2) 01592 { 01593 uint32_t result; 01594 01595 __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01596 return(result); 01597 } 01598 01599 __STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2) 01600 { 01601 uint32_t result; 01602 01603 __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01604 return(result); 01605 } 01606 01607 __STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2) 01608 { 01609 uint32_t result; 01610 01611 __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01612 return(result); 01613 } 01614 01615 __STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2) 01616 { 01617 uint32_t result; 
01618 01619 __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01620 return(result); 01621 } 01622 01623 __STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2) 01624 { 01625 uint32_t result; 01626 01627 __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01628 return(result); 01629 } 01630 01631 __STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2) 01632 { 01633 uint32_t result; 01634 01635 __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01636 return(result); 01637 } 01638 01639 __STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2) 01640 { 01641 uint32_t result; 01642 01643 __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01644 return(result); 01645 } 01646 01647 __STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2) 01648 { 01649 uint32_t result; 01650 01651 __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01652 return(result); 01653 } 01654 01655 __STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2) 01656 { 01657 uint32_t result; 01658 01659 __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01660 return(result); 01661 } 01662 01663 __STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3) 01664 { 01665 uint32_t result; 01666 01667 __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); 01668 return(result); 01669 } 01670 01671 #define __SSAT16(ARG1,ARG2) \ 01672 ({ \ 01673 int32_t __RES, __ARG1 = (ARG1); \ 01674 __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \ 01675 __RES; \ 01676 }) 01677 01678 #define __USAT16(ARG1,ARG2) \ 01679 ({ \ 01680 uint32_t __RES, __ARG1 = (ARG1); \ 01681 __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \ 01682 __RES; \ 01683 }) 01684 01685 __STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1) 01686 { 01687 uint32_t 
result; 01688 01689 __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1)); 01690 return(result); 01691 } 01692 01693 __STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2) 01694 { 01695 uint32_t result; 01696 01697 __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01698 return(result); 01699 } 01700 01701 __STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1) 01702 { 01703 uint32_t result; 01704 01705 __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1)); 01706 return(result); 01707 } 01708 01709 __STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2) 01710 { 01711 uint32_t result; 01712 01713 __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01714 return(result); 01715 } 01716 01717 __STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2) 01718 { 01719 uint32_t result; 01720 01721 __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01722 return(result); 01723 } 01724 01725 __STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2) 01726 { 01727 uint32_t result; 01728 01729 __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01730 return(result); 01731 } 01732 01733 __STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3) 01734 { 01735 uint32_t result; 01736 01737 __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); 01738 return(result); 01739 } 01740 01741 __STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3) 01742 { 01743 uint32_t result; 01744 01745 __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); 01746 return(result); 01747 } 01748 01749 __STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc) 01750 { 01751 union llreg_u{ 01752 uint32_t w32[2]; 01753 uint64_t w64; 01754 } llr; 01755 llr.w64 = acc; 01756 01757 #ifndef __ARMEB__ /* Little 
endian */ 01758 __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) ); 01759 #else /* Big endian */ 01760 __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) ); 01761 #endif 01762 01763 return(llr.w64); 01764 } 01765 01766 __STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc) 01767 { 01768 union llreg_u{ 01769 uint32_t w32[2]; 01770 uint64_t w64; 01771 } llr; 01772 llr.w64 = acc; 01773 01774 #ifndef __ARMEB__ /* Little endian */ 01775 __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) ); 01776 #else /* Big endian */ 01777 __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) ); 01778 #endif 01779 01780 return(llr.w64); 01781 } 01782 01783 __STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2) 01784 { 01785 uint32_t result; 01786 01787 __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01788 return(result); 01789 } 01790 01791 __STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2) 01792 { 01793 uint32_t result; 01794 01795 __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01796 return(result); 01797 } 01798 01799 __STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3) 01800 { 01801 uint32_t result; 01802 01803 __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); 01804 return(result); 01805 } 01806 01807 __STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3) 01808 { 01809 uint32_t result; 01810 01811 __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); 01812 return(result); 01813 } 01814 01815 
__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc) 01816 { 01817 union llreg_u{ 01818 uint32_t w32[2]; 01819 uint64_t w64; 01820 } llr; 01821 llr.w64 = acc; 01822 01823 #ifndef __ARMEB__ /* Little endian */ 01824 __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) ); 01825 #else /* Big endian */ 01826 __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) ); 01827 #endif 01828 01829 return(llr.w64); 01830 } 01831 01832 __STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc) 01833 { 01834 union llreg_u{ 01835 uint32_t w32[2]; 01836 uint64_t w64; 01837 } llr; 01838 llr.w64 = acc; 01839 01840 #ifndef __ARMEB__ /* Little endian */ 01841 __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) ); 01842 #else /* Big endian */ 01843 __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) ); 01844 #endif 01845 01846 return(llr.w64); 01847 } 01848 01849 __STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2) 01850 { 01851 uint32_t result; 01852 01853 __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01854 return(result); 01855 } 01856 01857 __STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2) 01858 { 01859 int32_t result; 01860 01861 __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01862 return(result); 01863 } 01864 01865 __STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2) 01866 { 01867 int32_t result; 01868 01869 __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 01870 return(result); 01871 } 01872 01873 #define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \ 
01874 ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) ) 01875 01876 #define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \ 01877 ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) ) 01878 01879 __STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3) 01880 { 01881 int32_t result; 01882 01883 __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) ); 01884 return(result); 01885 } 01886 01887 #endif /* (__ARM_FEATURE_DSP == 1) */ 01888 /*@} end of group CMSIS_SIMD_intrinsics */ 01889 01890 01891 #endif /* __CMSIS_ARMCLANG_H */
Generated on Tue Jul 12 2022 15:37:13 by Doxygen 1.7.2