#ifndef RTX_CORE_CA_H_
#define RTX_CORE_CA_H_

#include "RTE_Components.h"
#include CMSIS_device_header

#define FALSE                   ((bool_t)0)
#define TRUE                    ((bool_t)1)

#define EXCLUSIVE_ACCESS        1

#define OS_TICK_HANDLER         osRtxTick_Handler

// CPSR bit definitions
#define CPSR_T_BIT              0x20U
#define CPSR_I_BIT              0x80U
#define CPSR_F_BIT              0x40U

// CPSR mode bit-field values
#define CPSR_MODE_USER          0x10U
#define CPSR_MODE_SYSTEM        0x1FU

//  xPSR initialization value for a new thread
//   privileged: TRUE=privileged (System mode), FALSE=unprivileged (User mode)
//   thumb:      TRUE=Thumb state, FALSE=Arm state
__STATIC_INLINE uint32_t xPSR_InitVal (bool_t privileged, bool_t thumb) {
  uint32_t psr;

  if (privileged) {
    if (thumb) {
      psr = CPSR_MODE_SYSTEM | CPSR_T_BIT;
    } else {
      psr = CPSR_MODE_SYSTEM;
    }
  } else {
    if (thumb) {
      psr = CPSR_MODE_USER   | CPSR_T_BIT;
    } else {
      psr = CPSR_MODE_USER;
    }
  }

  return psr;
}
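/* Illustrative usage (a sketch, not part of the original header): an
   unprivileged Thumb thread would start in User mode with the T bit set,
   i.e. xPSR_InitVal(FALSE, TRUE) == (CPSR_MODE_USER | CPSR_T_BIT) == 0x30U. */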
#define STACK_FRAME_INIT_VAL    0x00U

//  Stack offset of register R0 within a saved thread context
//   stack_frame: stack frame type flags
//   return:      offset of R0 relative to the saved stack pointer
__STATIC_INLINE uint32_t StackOffsetR0 (uint8_t stack_frame) {
  uint32_t offset;
  if        ((stack_frame & 0x04U) != 0U) {
    offset = (32U*8U) + (2U*4U) + (8U*4U);
  } else if ((stack_frame & 0x02U) != 0U) {
    offset = (16U*8U) + (2U*4U) + (8U*4U);
  } else {
    offset =                      (8U*4U);
  }
  return offset;
}
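/* Illustrative check (a sketch, not part of the original header): for a frame
   flagged 0x04 (full VFP register bank saved),
   StackOffsetR0(0x04U) == (32U*8U) + (2U*4U) + (8U*4U) == 296U bytes. */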
//  Get Process Stack Pointer
#if defined(__CC_ARM)
static __asm    uint32_t __get_PSP (void) {

__attribute__((target("arm")))
__STATIC_INLINE uint32_t __get_PSP (void) {
  register uint32_t ret;

    "sub  %[ret],%[ret],#32\n\t"

//  Set Control Register
__STATIC_INLINE void __set_CONTROL(uint32_t control) {
//  Check if running Privileged
__STATIC_INLINE bool_t IsPrivileged (void) {
  return (__get_mode() != CPSR_MODE_USER);
}
//  Check if in IRQ/Exception Mode (any mode other than User or System)
__STATIC_INLINE bool_t IsIrqMode (void) {
  return ((__get_mode() != CPSR_MODE_USER) && (__get_mode() != CPSR_MODE_SYSTEM));
}
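/* Illustrative usage (a sketch, not part of the original header): kernel code
   typically combines these predicates to choose the calling path, e.g.

     if (IsIrqMode() || IsIrqMasked()) {
       // interrupt context or interrupts masked: use the ISR-safe path
     } else {
       // thread context: the call may be routed through an SVC
     }

   IsIrqMasked() is declared just below. */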
//  Check if IRQ is Masked
__STATIC_INLINE bool_t IsIrqMasked (void) {
// Pending Service Call flag (software emulation of PendSV on Cortex-A),
// set/cleared by the helpers below and serviced by the IRQ handling code.
extern uint8_t IRQ_PendSV;
//  Setup SVC and PendSV System Service Calls
__STATIC_INLINE void SVC_Setup (void) {
}
//  Get Pending SV (Service Call) Flag
__STATIC_INLINE uint8_t GetPendSV (void) {
  return (IRQ_PendSV);
}

//  Clear Pending SV (Service Call) Flag
__STATIC_INLINE void ClrPendSV (void) {
  IRQ_PendSV = 0U;
}

//  Set Pending SV (Service Call) Flag
__STATIC_INLINE void SetPendSV (void) {
  IRQ_PendSV = 1U;
}
//  ==== Service Calls definitions ====

#if defined(__CC_ARM)

#define __SVC_INDIRECT(n) __svc_indirect(n)

#define SVC0_0N(f,t)                                                           \
__SVC_INDIRECT(0) t    svc##f (t(*)());                                        \
__attribute__((always_inline))                                                 \
__STATIC_INLINE   t  __svc##f (void) {                                         \
  svc##f(svcRtx##f);                                                           \
}

#define SVC0_0(f,t)                                                            \
__SVC_INDIRECT(0) t    svc##f (t(*)());                                        \
__attribute__((always_inline))                                                 \
__STATIC_INLINE   t  __svc##f (void) {                                         \
  return svc##f(svcRtx##f);                                                    \
}

#define SVC0_1N(f,t,t1)                                                        \
__SVC_INDIRECT(0) t    svc##f (t(*)(t1),t1);                                   \
__attribute__((always_inline))                                                 \
__STATIC_INLINE   t  __svc##f (t1 a1) {                                        \
  svc##f(svcRtx##f,a1);                                                        \
}

#define SVC0_1(f,t,t1)                                                         \
__SVC_INDIRECT(0) t    svc##f (t(*)(t1),t1);                                   \
__attribute__((always_inline))                                                 \
__STATIC_INLINE   t  __svc##f (t1 a1) {                                        \
  return svc##f(svcRtx##f,a1);                                                 \
}

#define SVC0_2(f,t,t1,t2)                                                      \
__SVC_INDIRECT(0) t    svc##f (t(*)(t1,t2),t1,t2);                             \
__attribute__((always_inline))                                                 \
__STATIC_INLINE   t  __svc##f (t1 a1, t2 a2) {                                 \
  return svc##f(svcRtx##f,a1,a2);                                              \
}

#define SVC0_3(f,t,t1,t2,t3)                                                   \
__SVC_INDIRECT(0) t    svc##f (t(*)(t1,t2,t3),t1,t2,t3);                       \
__attribute__((always_inline))                                                 \
__STATIC_INLINE   t  __svc##f (t1 a1, t2 a2, t3 a3) {                          \
  return svc##f(svcRtx##f,a1,a2,a3);                                           \
}

#define SVC0_4(f,t,t1,t2,t3,t4)                                                \
__SVC_INDIRECT(0) t    svc##f (t(*)(t1,t2,t3,t4),t1,t2,t3,t4);                 \
__attribute__((always_inline))                                                 \
__STATIC_INLINE   t  __svc##f (t1 a1, t2 a2, t3 a3, t4 a4) {                   \
  return svc##f(svcRtx##f,a1,a2,a3,a4);                                        \
}

#elif defined(__ICCARM__)

#define SVC_ArgF(f)                                                            \

#define STRINGIFY(a) #a
#define __SVC_INDIRECT(n) _Pragma(STRINGIFY(swi_number = n)) __swi

#define SVC0_0N(f,t)                                                           \
__SVC_INDIRECT(0) t    svc##f ();                                              \
__attribute__((always_inline))                                                 \
__STATIC_INLINE   t  __svc##f (void) {                                         \
  SVC_ArgF(svcRtx##f);                                                         \
  svc##f();                                                                    \
}

#define SVC0_0(f,t)                                                            \
__SVC_INDIRECT(0) t    svc##f ();                                              \
__attribute__((always_inline))                                                 \
__STATIC_INLINE   t  __svc##f (void) {                                         \
  SVC_ArgF(svcRtx##f);                                                         \
  return svc##f();                                                             \
}

#define SVC0_1N(f,t,t1)                                                        \
__SVC_INDIRECT(0) t    svc##f (t1 a1);                                         \
__attribute__((always_inline))                                                 \
__STATIC_INLINE   t  __svc##f (t1 a1) {                                        \
  SVC_ArgF(svcRtx##f);                                                         \
  svc##f(a1);                                                                  \
}

#define SVC0_1(f,t,t1)                                                         \
__SVC_INDIRECT(0) t    svc##f (t1 a1);                                         \
__attribute__((always_inline))                                                 \
__STATIC_INLINE   t  __svc##f (t1 a1) {                                        \
  SVC_ArgF(svcRtx##f);                                                         \
  return svc##f(a1);                                                           \
}

#define SVC0_2(f,t,t1,t2)                                                      \
__SVC_INDIRECT(0) t    svc##f (t1 a1, t2 a2);                                  \
__attribute__((always_inline))                                                 \
__STATIC_INLINE   t  __svc##f (t1 a1, t2 a2) {                                 \
  SVC_ArgF(svcRtx##f);                                                         \
  return svc##f(a1,a2);                                                        \
}

#define SVC0_3(f,t,t1,t2,t3)                                                   \
__SVC_INDIRECT(0) t    svc##f (t1 a1, t2 a2, t3 a3);                           \
__attribute__((always_inline))                                                 \
__STATIC_INLINE   t  __svc##f (t1 a1, t2 a2, t3 a3) {                          \
  SVC_ArgF(svcRtx##f);                                                         \
  return svc##f(a1,a2,a3);                                                     \
}

#define SVC0_4(f,t,t1,t2,t3,t4)                                                \
__SVC_INDIRECT(0) t    svc##f (t1 a1, t2 a2, t3 a3, t4 a4);                    \
__attribute__((always_inline))                                                 \
__STATIC_INLINE   t  __svc##f (t1 a1, t2 a2, t3 a3, t4 a4) {                   \
  SVC_ArgF(svcRtx##f);                                                         \
  return svc##f(a1,a2,a3,a4);                                                  \
}

#else   // !(defined(__CC_ARM) || defined(__ICCARM__))

#define SVC_RegF "r12"

#define SVC_ArgN(n) \
register uint32_t __r##n __ASM("r"#n)

#define SVC_ArgR(n,a) \
register uint32_t __r##n __ASM("r"#n) = (uint32_t)a

#define SVC_ArgF(f) \
register uint32_t __rf   __ASM(SVC_RegF) = (uint32_t)f

#define SVC_In0 "r"(__rf)
#define SVC_In1 "r"(__rf),"r"(__r0)
#define SVC_In2 "r"(__rf),"r"(__r0),"r"(__r1)
#define SVC_In3 "r"(__rf),"r"(__r0),"r"(__r1),"r"(__r2)
#define SVC_In4 "r"(__rf),"r"(__r0),"r"(__r1),"r"(__r2),"r"(__r3)

#define SVC_Out0
#define SVC_Out1 "=r"(__r0)

#define SVC_CL0
#define SVC_CL1 "r1"
#define SVC_CL2 "r0","r1"

#define SVC_Call0(in, out, cl)                                                 \
  __ASM volatile ("svc 0" : out : in : cl)

#define SVC0_0N(f,t)                                                           \
__attribute__((always_inline))                                                 \
__STATIC_INLINE t __svc##f (void) {                                            \
  SVC_ArgF(svcRtx##f);                                                         \
  SVC_Call0(SVC_In0, SVC_Out0, SVC_CL2);                                       \
}

#define SVC0_0(f,t)                                                            \
__attribute__((always_inline))                                                 \
__STATIC_INLINE t __svc##f (void) {                                            \
  SVC_ArgN(0);                                                                 \
  SVC_ArgF(svcRtx##f);                                                         \
  SVC_Call0(SVC_In0, SVC_Out1, SVC_CL1);                                       \
  return (t) __r0;                                                             \
}

#define SVC0_1N(f,t,t1)                                                        \
__attribute__((always_inline))                                                 \
__STATIC_INLINE t __svc##f (t1 a1) {                                           \
  SVC_ArgR(0,a1);                                                              \
  SVC_ArgF(svcRtx##f);                                                         \
  SVC_Call0(SVC_In1, SVC_Out0, SVC_CL1);                                       \
}

#define SVC0_1(f,t,t1)                                                         \
__attribute__((always_inline))                                                 \
__STATIC_INLINE t __svc##f (t1 a1) {                                           \
  SVC_ArgR(0,a1);                                                              \
  SVC_ArgF(svcRtx##f);                                                         \
  SVC_Call0(SVC_In1, SVC_Out1, SVC_CL1);                                       \
  return (t) __r0;                                                             \
}

#define SVC0_2(f,t,t1,t2)                                                      \
__attribute__((always_inline))                                                 \
__STATIC_INLINE t __svc##f (t1 a1, t2 a2) {                                    \
  SVC_ArgR(0,a1);                                                              \
  SVC_ArgR(1,a2);                                                              \
  SVC_ArgF(svcRtx##f);                                                         \
  SVC_Call0(SVC_In2, SVC_Out1, SVC_CL0);                                       \
  return (t) __r0;                                                             \
}

#define SVC0_3(f,t,t1,t2,t3)                                                   \
__attribute__((always_inline))                                                 \
__STATIC_INLINE t __svc##f (t1 a1, t2 a2, t3 a3) {                             \
  SVC_ArgR(0,a1);                                                              \
  SVC_ArgR(1,a2);                                                              \
  SVC_ArgR(2,a3);                                                              \
  SVC_ArgF(svcRtx##f);                                                         \
  SVC_Call0(SVC_In3, SVC_Out1, SVC_CL0);                                       \
  return (t) __r0;                                                             \
}

#define SVC0_4(f,t,t1,t2,t3,t4)                                                \
__attribute__((always_inline))                                                 \
__STATIC_INLINE t __svc##f (t1 a1, t2 a2, t3 a3, t4 a4) {                      \
  SVC_ArgR(0,a1);                                                              \
  SVC_ArgR(1,a2);                                                              \
  SVC_ArgR(2,a3);                                                              \
  SVC_ArgR(3,a4);                                                              \
  SVC_ArgF(svcRtx##f);                                                         \
  SVC_Call0(SVC_In4, SVC_Out1, SVC_CL0);                                       \
  return (t) __r0;                                                             \
}

#endif

//  ==== Exclusive Access Operations ====

#if (EXCLUSIVE_ACCESS == 1)

//  Atomic write (8-bit): store val to *mem and return the previous value
#if defined(__CC_ARM)
static __asm    uint8_t atomic_wr8 (uint8_t *mem, uint8_t val) {
__STATIC_INLINE uint8_t atomic_wr8 (uint8_t *mem, uint8_t val) {
#pragma diag_suppress=Pe550
  register uint32_t res;
#pragma diag_default=Pe550
  register uint8_t  ret;
  ".syntax unified\n\t"

    "ldrexb %[ret],[%[mem]]\n\t"
    "strexb %[res],%[val],[%[mem]]\n\t"

//  Atomic set bits (32-bit): *mem |= bits, returns the new value
#if defined(__CC_ARM)
static __asm    uint32_t atomic_set32 (uint32_t *mem, uint32_t bits) {
__STATIC_INLINE uint32_t atomic_set32 (uint32_t *mem, uint32_t bits) {
#pragma diag_suppress=Pe550
  register uint32_t val, res;
#pragma diag_default=Pe550
  register uint32_t ret;
  ".syntax unified\n\t"

    "ldrex %[val],[%[mem]]\n\t"
    "orr   %[ret],%[val],%[bits]\n\t"
    "strex %[res],%[ret],[%[mem]]\n\t"

//  Atomic clear bits (32-bit): *mem &= ~bits, returns the previous value
#if defined(__CC_ARM)
static __asm    uint32_t atomic_clr32 (uint32_t *mem, uint32_t bits) {
__STATIC_INLINE uint32_t atomic_clr32 (uint32_t *mem, uint32_t bits) {
#pragma diag_suppress=Pe550
  register uint32_t val, res;
#pragma diag_default=Pe550
  register uint32_t ret;
  ".syntax unified\n\t"

    "ldrex %[ret],[%[mem]]\n\t"
    "bic   %[val],%[ret],%[bits]\n\t"
    "strex %[res],%[val],[%[mem]]\n\t"

//  Atomic check and clear (32-bit): clears the specified bits only if all of them are set
#if defined(__CC_ARM)
static __asm    uint32_t atomic_chk32_all (uint32_t *mem, uint32_t bits) {
__STATIC_INLINE uint32_t atomic_chk32_all (uint32_t *mem, uint32_t bits) {
#pragma diag_suppress=Pe550
  register uint32_t val, res;
#pragma diag_default=Pe550
  register uint32_t ret;
  ".syntax unified\n\t"

    "ldrex %[ret],[%[mem]]\n\t"
    "and   %[val],%[ret],%[bits]\n\t"
    "cmp   %[val],%[bits]\n\t"

    "bic   %[val],%[ret],%[bits]\n\t"
    "strex %[res],%[val],[%[mem]]\n\t"

//  Atomic check and clear (32-bit): clears the specified bits if any of them are set
#if defined(__CC_ARM)
static __asm    uint32_t atomic_chk32_any (uint32_t *mem, uint32_t bits) {
__STATIC_INLINE uint32_t atomic_chk32_any (uint32_t *mem, uint32_t bits) {
#pragma diag_suppress=Pe550
  register uint32_t val, res;
#pragma diag_default=Pe550
  register uint32_t ret;
  ".syntax unified\n\t"

    "ldrex %[ret],[%[mem]]\n\t"
    "tst   %[ret],%[bits]\n\t"

    "bic   %[val],%[ret],%[bits]\n\t"
    "strex %[res],%[val],[%[mem]]\n\t"

//  Atomic increment (32-bit): returns the previous value
#if defined(__CC_ARM)
static __asm    uint32_t atomic_inc32 (uint32_t *mem) {
__STATIC_INLINE uint32_t atomic_inc32 (uint32_t *mem) {
#pragma diag_suppress=Pe550
  register uint32_t val, res;
#pragma diag_default=Pe550
  register uint32_t ret;
  ".syntax unified\n\t"

    "ldrex %[ret],[%[mem]]\n\t"
    "adds  %[val],%[ret],#1\n\t"
    "strex %[res],%[val],[%[mem]]\n\t"

//  Atomic increment (16-bit) if *mem is less than max: returns the previous value
#if defined(__CC_ARM)
static __asm    uint16_t atomic_inc16_lt (uint16_t *mem, uint16_t max) {
__STATIC_INLINE uint16_t atomic_inc16_lt (uint16_t *mem, uint16_t max) {
#pragma diag_suppress=Pe550
  register uint32_t val, res;
#pragma diag_default=Pe550
  register uint16_t ret;
  ".syntax unified\n\t"

    "ldrexh %[ret],[%[mem]]\n\t"
    "cmp    %[max],%[ret]\n\t"

    "adds   %[val],%[ret],#1\n\t"
    "strexh %[res],%[val],[%[mem]]\n\t"

//  Atomic increment (16-bit) with limit handling: returns the previous value
#if defined(__CC_ARM)
static __asm    uint16_t atomic_inc16_lim (uint16_t *mem, uint16_t lim) {
__STATIC_INLINE uint16_t atomic_inc16_lim (uint16_t *mem, uint16_t lim) {
#pragma diag_suppress=Pe550
  register uint32_t val, res;
#pragma diag_default=Pe550
  register uint16_t ret;
  ".syntax unified\n\t"

    "ldrexh %[ret],[%[mem]]\n\t"
    "adds   %[val],%[ret],#1\n\t"
    "cmp    %[lim],%[val]\n\t"

    "strexh %[res],%[val],[%[mem]]\n\t"

//  Atomic decrement (32-bit): returns the previous value
#if defined(__CC_ARM)
static __asm    uint32_t atomic_dec32 (uint32_t *mem) {
__STATIC_INLINE uint32_t atomic_dec32 (uint32_t *mem) {
#pragma diag_suppress=Pe550
  register uint32_t val, res;
#pragma diag_default=Pe550
  register uint32_t ret;
  ".syntax unified\n\t"

    "ldrex %[ret],[%[mem]]\n\t"
    "subs  %[val],%[ret],#1\n\t"
    "strex %[res],%[val],[%[mem]]\n\t"

//  Atomic decrement (32-bit) if not zero: returns the previous value
#if defined(__CC_ARM)
static __asm    uint32_t atomic_dec32_nz (uint32_t *mem) {
__STATIC_INLINE uint32_t atomic_dec32_nz (uint32_t *mem) {
#pragma diag_suppress=Pe550
  register uint32_t val, res;
#pragma diag_default=Pe550
  register uint32_t ret;
  ".syntax unified\n\t"

    "ldrex %[ret],[%[mem]]\n\t"

    "subs  %[val],%[ret],#1\n\t"
    "strex %[res],%[val],[%[mem]]\n\t"

//  Atomic decrement (16-bit) if not zero: returns the previous value
#if defined(__CC_ARM)
static __asm    uint16_t atomic_dec16_nz (uint16_t *mem) {
__STATIC_INLINE uint16_t atomic_dec16_nz (uint16_t *mem) {
#pragma diag_suppress=Pe550
  register uint32_t val, res;
#pragma diag_default=Pe550
  register uint16_t ret;
  ".syntax unified\n\t"

    "ldrexh %[ret],[%[mem]]\n\t"

    "subs   %[val],%[ret],#1\n\t"
    "strexh %[res],%[val],[%[mem]]\n\t"

//  Atomic link get: removes and returns the head element of a singly linked list (*root)
#if defined(__CC_ARM)
static __asm    void *atomic_link_get (void **root) {

__STATIC_INLINE void *atomic_link_get (void **root) {
#pragma diag_suppress=Pe550
  register uint32_t val, res;
#pragma diag_default=Pe550

  ".syntax unified\n\t"

    "ldrex %[ret],[%[root]]\n\t"

    "ldr   %[val],[%[ret]]\n\t"
    "strex %[res],%[val],[%[root]]\n\t"

  : [ret]  "=&l" (ret),
//  Atomic link put: inserts an element at the head of a singly linked list (*root)
#if defined(__CC_ARM)
static __asm    void atomic_link_put (void **root, void *link) {

__STATIC_INLINE void atomic_link_put (void **root, void *link) {
#pragma diag_suppress=Pe550
  register uint32_t val1, val2, res;
#pragma diag_default=Pe550

  __ASM volatile (
  ".syntax unified\n\t"
    "ldr   %[val1],[%[root]]\n\t"
    "str   %[val1],[%[link]]\n\t"
    "ldrex %[val1],[%[root]]\n\t"
    "ldr   %[val2],[%[link]]\n\t"
    "cmp   %[val2],%[val1]\n\t"

    "strex %[res],%[link],[%[root]]\n\t"
  : [val1] "=&l" (val1),
    [val2] "=&l" (val2),
    [res]  "=&l" (res)
  : [root] "l"   (root),
    [link] "l"   (link)
  : "cc", "memory"
  );
}
#endif  // (EXCLUSIVE_ACCESS == 1)

#endif  // RTX_CORE_CA_H_