Alessandro Angelino / mbed-hal

Dependencies:   target-freescale

Fork of mbed-hal by Morpheus

Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers core_caFunc.h Source File

core_caFunc.h

Go to the documentation of this file.
00001 /**************************************************************************//**
00002  * @file     core_caFunc.h
00003  * @brief    CMSIS Cortex-A Core Function Access Header File
00004  * @version  V3.10
00005  * @date     30 Oct 2013
00006  *
00007  * @note
00008  *
00009  ******************************************************************************/
00010 /* Copyright (c) 2009 - 2013 ARM LIMITED
00011 
00012    All rights reserved.
00013    Redistribution and use in source and binary forms, with or without
00014    modification, are permitted provided that the following conditions are met:
00015    - Redistributions of source code must retain the above copyright
00016      notice, this list of conditions and the following disclaimer.
00017    - Redistributions in binary form must reproduce the above copyright
00018      notice, this list of conditions and the following disclaimer in the
00019      documentation and/or other materials provided with the distribution.
00020    - Neither the name of ARM nor the names of its contributors may be used
00021      to endorse or promote products derived from this software without
00022      specific prior written permission.
00023    *
00024    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
00025    AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
00026    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
00027    ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
00028    LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
00029    CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
00030    SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
00031    INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
00032    CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
00033    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
00034    POSSIBILITY OF SUCH DAMAGE.
00035    ---------------------------------------------------------------------------*/
00036 
00037 
00038 #ifndef __CORE_CAFUNC_H__
00039 #define __CORE_CAFUNC_H__
00040 
00041 
00042 /* ###########################  Core Function Access  ########################### */
00043 /** \ingroup  CMSIS_Core_FunctionInterface
00044     \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
00045   @{
00046  */
00047 
00048 #if   defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
00049 /* ARM armcc specific functions */
00050 
00051 #if (__ARMCC_VERSION < 400677)
00052   #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
00053 #endif
00054 
/* ARMv7-A processor mode encodings for CPSR[4:0] (used by CPS / mode switches below) */
00055 #define MODE_USR 0x10
00056 #define MODE_FIQ 0x11
00057 #define MODE_IRQ 0x12
00058 #define MODE_SVC 0x13
00059 #define MODE_MON 0x16
00060 #define MODE_ABT 0x17
00061 #define MODE_HYP 0x1A
00062 #define MODE_UND 0x1B
00063 #define MODE_SYS 0x1F
00064 
00065 /** \brief  Get APSR Register
00066 
00067     This function returns the content of the APSR Register.
00068 
00069     \return               APSR Register value
00070  */
00071 __STATIC_INLINE uint32_t __get_APSR(void)
00072 {
  /* armcc named-register variable: reads the APSR core register directly */
00073   register uint32_t __regAPSR          __ASM("apsr");
00074   return(__regAPSR);
00075 }
00076 
00077 
00078 /** \brief  Get CPSR Register
00079 
00080     This function returns the content of the CPSR Register.
00081 
00082     \return               CPSR Register value
00083  */
00084 __STATIC_INLINE uint32_t __get_CPSR(void)
00085 {
00086   register uint32_t __regCPSR          __ASM("cpsr");
00087   return(__regCPSR);
00088 }
00089 
00090 /** \brief  Set Stack Pointer
00091 
00092     This function assigns the given value to the current stack pointer.
00093 
00094     \param [in]    topOfStack  Stack Pointer value to set
00095  */
/* File-scope armcc named-register variable bound to SP; shared by __set_SP below */
00096 register uint32_t __regSP              __ASM("sp");
00097 __STATIC_INLINE void __set_SP(uint32_t topOfStack)
00098 {
00099     __regSP = topOfStack;
00100 }
00101 
00102 
00103 /** \brief  Get link register
00104 
00105     This function returns the value of the link register
00106 
00107     \return    Value of link register
00108  */
/* File-scope armcc named-register variable bound to LR; shared by __get_LR/__set_LR */
00109 register uint32_t __reglr         __ASM("lr");
00110 __STATIC_INLINE uint32_t __get_LR(void)
00111 {
00112   return(__reglr);
00113 }
00114 
00115 /** \brief  Set link register
00116 
00117     This function sets the value of the link register
00118 
00119     \param [in]    lr  LR value to set
00120  */
00121 __STATIC_INLINE void __set_LR(uint32_t lr)
00122 {
00123   __reglr = lr;
00124 }
00125 
00126 /** \brief  Set Process Stack Pointer
00127 
00128     This function assigns the given value to the USR/SYS Stack Pointer (PSP).
00129 
00130     \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
00131  */
/* armcc embedded assembler: briefly switches to SYS mode to write its banked SP,
   then restores the previous mode. Privileged-only (CPS is a NOP in USR mode). */
00132 __STATIC_ASM void __set_PSP(uint32_t topOfProcStack)
00133 {
00134     ARM
00135     PRESERVE8
00136 
00137     BIC     R0, R0, #7  ;ensure stack is 8-byte aligned
00138     MRS     R1, CPSR
00139     CPS     #MODE_SYS   ;no effect in USR mode
00140     MOV     SP, R0
00141     MSR     CPSR_c, R1  ;no effect in USR mode
00142     ISB
00143     BX      LR
00144 
00145 }
00146 
00147 /** \brief  Set User Mode
00148 
00149     This function changes the processor state to User Mode
00150  */
/* One-way drop to USR mode; there is no return to a privileged mode afterwards */
00151 __STATIC_ASM void __set_CPS_USR(void)
00152 {
00153     ARM 
00154 
00155     CPS  #MODE_USR  
00156     BX   LR
00157 }
00158 
00159 
00160 /** \brief  Enable FIQ
00161 
00162     This function enables FIQ interrupts by clearing the F-bit in the CPSR.
00163     Can only be executed in Privileged modes.
00164  */
/* Map the CMSIS name onto the armcc __enable_fiq intrinsic */
00165 #define __enable_fault_irq                __enable_fiq
00166 
00167 
00168 /** \brief  Disable FIQ
00169 
00170     This function disables FIQ interrupts by setting the F-bit in the CPSR.
00171     Can only be executed in Privileged modes.
00172  */
/* Map the CMSIS name onto the armcc __disable_fiq intrinsic */
00173 #define __disable_fault_irq               __disable_fiq
00174 
00175 
00176 /** \brief  Get FPSCR
00177 
00178     This function returns the current value of the Floating Point Status/Control register.
00179 
00180     \return               Floating Point Status/Control register value
00181  */
00182 __STATIC_INLINE uint32_t __get_FPSCR(void)
00183 {
00184 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
00185   register uint32_t __regfpscr         __ASM("fpscr");
00186   return(__regfpscr);
00187 #else
00188    return(0);
00189 #endif
00190 }
00191 
00192 
00193 /** \brief  Set FPSCR
00194 
00195     This function assigns the given value to the Floating Point Status/Control register.
00196 
00197     \param [in]    fpscr  Floating Point Status/Control value to set
00198  */
00199 __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
00200 {
00201 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
00202   register uint32_t __regfpscr         __ASM("fpscr");
00203   __regfpscr = (fpscr);
00204 #endif
00205 }
00206 
00207 /** \brief  Get FPEXC
00208 
00209     This function returns the current value of the Floating Point Exception Control register.
00210 
00211     \return               Floating Point Exception Control register value
00212  */
00213 __STATIC_INLINE uint32_t __get_FPEXC(void)
00214 {
00215 #if (__FPU_PRESENT == 1)
00216   register uint32_t __regfpexc         __ASM("fpexc");
00217   return(__regfpexc);
00218 #else
00219    return(0);
00220 #endif
00221 }
00222 
00223 
00224 /** \brief  Set FPEXC
00225 
00226     This function assigns the given value to the Floating Point Exception Control register.
00227 
00228     \param [in]    fpexc  Floating Point Exception Control value to set
00229  */
00230 __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
00231 {
00232 #if (__FPU_PRESENT == 1)
00233   register uint32_t __regfpexc         __ASM("fpexc");
00234   __regfpexc = (fpexc);
00235 #endif
00236 }
00237 
00238 /** \brief  Get CPACR
00239 
00240     This function returns the current value of the Coprocessor Access Control register.
00241 
00242     \return               Coprocessor Access Control register value
00243  */
00244 __STATIC_INLINE uint32_t __get_CPACR(void)
00245 {
00246     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
00247     return __regCPACR;
00248 }
00249 
00250 /** \brief  Set CPACR
00251 
00252     This function assigns the given value to the Coprocessor Access Control register.
00253 
00254     \param [in]    cpacr  Coprocessor Access Control value to set
00255  */
00256 __STATIC_INLINE void __set_CPACR(uint32_t cpacr)
00257 {
00258     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
00259     __regCPACR = cpacr;
00260     __ISB();
00261 }
00262 
00263 /** \brief  Get CBAR
00264 
00265     This function returns the value of the Configuration Base Address register.
00266 
00267     \return               Configuration Base Address register value
00268  */
/* armcc named-register syntax "cp15:<op1>:<cN>:<cM>:<op2>" maps each access to an
   MRC/MCR instruction on coprocessor 15 */
00269 __STATIC_INLINE uint32_t __get_CBAR() {
00270     register uint32_t __regCBAR         __ASM("cp15:4:c15:c0:0");
00271     return(__regCBAR);
00272 }
00273 
00274 /** \brief  Get TTBR0
00275 
00276     This function returns the value of the Translation Table Base Register 0.
00277 
00278     \return               Translation Table Base Register 0 value
00279  */
00280 __STATIC_INLINE uint32_t __get_TTBR0() {
00281     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
00282     return(__regTTBR0);
00283 }
00284 
00285 /** \brief  Set TTBR0
00286 
00287     This function assigns the given value to the Translation Table Base Register 0.
00288 
00289     \param [in]    ttbr0  Translation Table Base Register 0 value to set
00290  */
00291 __STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
00292     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
00293     __regTTBR0 = ttbr0;
00294     __ISB();
00295 }
00296 
00297 /** \brief  Get DACR
00298 
00299     This function returns the value of the Domain Access Control Register.
00300 
00301     \return               Domain Access Control Register value
00302  */
00303 __STATIC_INLINE uint32_t __get_DACR() {
00304     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
00305     return(__regDACR);
00306 }
00307 
00308 /** \brief  Set DACR
00309 
00310     This function assigns the given value to the Domain Access Control Register.
00311 
00312     \param [in]    dacr   Domain Access Control Register value to set
00313  */
00314 __STATIC_INLINE void __set_DACR(uint32_t dacr) {
00315     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
00316     __regDACR = dacr;
00317     __ISB();
00318 }
00319 
00320 /******************************** Cache and BTAC enable  ****************************************************/
00321 
00322 /** \brief  Set SCTLR
00323 
00324     This function assigns the given value to the System Control Register.
00325 
00326     \param [in]    sctlr  System Control Register value to set
00327  */
00328 __STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
00329 {
00330     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
00331     __regSCTLR = sctlr;
00332 }
00333 
00334 /** \brief  Get SCTLR
00335 
00336     This function returns the value of the System Control Register.
00337 
00338     \return               System Control Register value
00339  */
00340 __STATIC_INLINE uint32_t __get_SCTLR() {
00341     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
00342     return(__regSCTLR);
00343 }
00344 
00345 /** \brief  Enable Caches
00346 
00347     Enable Caches
00348  */
00349 __STATIC_INLINE void __enable_caches(void) {
00350     // Set I bit 12 to enable I Cache
00351     // Set C bit  2 to enable D Cache
00352     // NOTE(review): no __ISB() after enabling, unlike __disable_caches — confirm intended
00353     __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
00354 }
00355 
00356 /** \brief  Disable Caches
00357 
00358     Disable Caches
00359  */
00360 __STATIC_INLINE void __disable_caches(void) {
00361     // Clear I bit 12 to disable I Cache
00362     // Clear C bit  2 to disable D Cache
00363     __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
00364     __ISB();
00365 }
00366 
00367 /** \brief  Enable BTAC
00368 
00369     Enable BTAC
00370  */
00371 __STATIC_INLINE void __enable_btac(void) {
00372     // Set Z bit 11 to enable branch prediction
00373     __set_SCTLR( __get_SCTLR() | (1 << 11));
00374     __ISB();
00375 }
00376 
00377 /** \brief  Disable BTAC
00378 
00379     Disable BTAC
00380  */
00381 __STATIC_INLINE void __disable_btac(void) {
00382     // Clear Z bit 11 to disable branch prediction
00383     // NOTE(review): no __ISB() here, unlike __enable_btac — confirm intended
00384     __set_SCTLR( __get_SCTLR() & ~(1 << 11));
00385 }
00384 
00385 
00386 /** \brief  Enable MMU
00387 
00388     Enable MMU
00389  */
00390 __STATIC_INLINE void __enable_mmu(void) {
00391     // Set M bit 0 to enable the MMU
00392     // Set AFE bit to enable simplified access permissions model
00393     // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
00394     __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
00395     __ISB();
00396 }
00397 
00398 /** \brief  Disable MMU
00399 
00400     Disable MMU
00401  */
00402 __STATIC_INLINE void __disable_mmu(void) {
00403     // Clear M bit 0 to disable the MMU
00404     __set_SCTLR( __get_SCTLR() & ~1);
00405     __ISB();
00406 }
00407 
00408 /******************************** TLB maintenance operations ************************************************/
00409 /** \brief  Invalidate the whole tlb
00410 
00411     TLBIALL. Invalidate the whole tlb
00412  */
00413 
00414 __STATIC_INLINE void __ca9u_inv_tlb_all(void) {
00415     register uint32_t __TLBIALL         __ASM("cp15:0:c8:c7:0");
00416     __TLBIALL = 0;
00417     __DSB();
00418     __ISB();
00419 }
00420 
00421 /******************************** BTB maintenance operations ************************************************/
00422 /** \brief  Invalidate entire branch predictor array
00423 
00424     BPIALL. Branch Predictor Invalidate All.
00425  */
00426 
00427 __STATIC_INLINE void __v7_inv_btac(void) {
00428     register uint32_t __BPIALL          __ASM("cp15:0:c7:c5:6");
00429     __BPIALL  = 0;
00430     __DSB();     //ensure completion of the invalidation
00431     __ISB();     //ensure instruction fetch path sees new state
00432 }
00433 
00434 
00435 /******************************** L1 cache operations ******************************************************/
00436 
00437 /** \brief  Invalidate the whole I$
00438 
00439     ICIALLU. Instruction Cache Invalidate All to PoU
00440  */
00441 __STATIC_INLINE void __v7_inv_icache_all(void) {
00442     register uint32_t __ICIALLU         __ASM("cp15:0:c7:c5:0");
00443     __ICIALLU = 0;
00444     __DSB();     //ensure completion of the invalidation
00445     __ISB();     //ensure instruction fetch path sees new I cache state
00446 }
00447 
00448 /** \brief  Clean D$ by MVA
00449 
00450     DCCMVAC. Data cache clean by MVA to PoC
00451 
00452     \param [in]    va  virtual address of the cache line to clean
00453  */
00453 __STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
00454     register uint32_t __DCCMVAC         __ASM("cp15:0:c7:c10:1");
00455     __DCCMVAC = (uint32_t)va;
00456     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
00457 }
00458 
00459 /** \brief  Invalidate D$ by MVA
00460 
00461     DCIMVAC. Data cache invalidate by MVA to PoC
00462 
00463     \param [in]    va  virtual address of the cache line to invalidate
00464  */
00462 __STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
00463     register uint32_t __DCIMVAC         __ASM("cp15:0:c7:c6:1");
00464     __DCIMVAC = (uint32_t)va;
00465     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
00466 }
00467 
00468 /** \brief  Clean and Invalidate D$ by MVA
00469 
00470     DCCIMVAC. Data cache clean and invalidate by MVA to PoC
00471 
00472     \param [in]    va  virtual address of the cache line to clean and invalidate
00473  */
00472 __STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
00473     register uint32_t __DCCIMVAC        __ASM("cp15:0:c7:c14:1");
00474     __DCCIMVAC = (uint32_t)va;
00475     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
00476 }
00477 
00478 /** \brief  Clean and Invalidate the entire data or unified cache
00479 
00480     Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.
00481 
00482     \param [in]    op  0 = DCISW (invalidate), 1 = DCCSW (clean), 2 = DCCISW (clean and invalidate) by Set/Way
00483  */
00482 #pragma push
00483 #pragma arm
00484 __STATIC_ASM void __v7_all_cache(uint32_t op) {
00485         ARM 
00486 
00487         PUSH    {R4-R11}
00488 
00489         MRC     p15, 1, R6, c0, c0, 1      // Read CLIDR
00490         ANDS    R3, R6, #0x07000000        // Extract coherency level
00491         MOV     R3, R3, LSR #23            // Total cache levels << 1
00492         BEQ     Finished                   // If 0, no need to clean
00493 
00494         MOV     R10, #0                    // R10 holds current cache level << 1
00495 Loop1   ADD     R2, R10, R10, LSR #1       // R2 holds cache "Set" position
00496         MOV     R1, R6, LSR R2             // Bottom 3 bits are the Cache-type for this level
00497         AND     R1, R1, #7                 // Isolate those lower 3 bits
00498         CMP     R1, #2
00499         BLT     Skip                       // No cache or only instruction cache at this level
00500 
00501         MCR     p15, 2, R10, c0, c0, 0     // Write the Cache Size selection register
00502         ISB                                // ISB to sync the change to the CacheSizeID reg
00503         MRC     p15, 1, R1, c0, c0, 0      // Reads current Cache Size ID register
00504         AND     R2, R1, #7                 // Extract the line length field
00505         ADD     R2, R2, #4                 // Add 4 for the line length offset (log2 16 bytes)
00506         LDR     R4, =0x3FF
00507         ANDS    R4, R4, R1, LSR #3         // R4 is the max number on the way size (right aligned)
00508         CLZ     R5, R4                     // R5 is the bit position of the way size increment
00509         LDR     R7, =0x7FFF
00510         ANDS    R7, R7, R1, LSR #13        // R7 is the max number of the index size (right aligned)
00511 
00512 Loop2   MOV     R9, R4                     // R9 working copy of the max way size (right aligned)
00513 
00514 Loop3   ORR     R11, R10, R9, LSL R5       // Factor in the Way number and cache number into R11
00515         ORR     R11, R11, R7, LSL R2       // Factor in the Set number
00516         CMP     R0, #0
00517         BNE     Dccsw
00518         MCR     p15, 0, R11, c7, c6, 2     // DCISW. Invalidate by Set/Way
00519         B       cont
00520 Dccsw   CMP     R0, #1
00521         BNE     Dccisw
00522         MCR     p15, 0, R11, c7, c10, 2    // DCCSW. Clean by Set/Way
00523         B       cont
00524 Dccisw  MCR     p15, 0, R11, c7, c14, 2    // DCCISW. Clean and Invalidate by Set/Way
00525 cont    SUBS    R9, R9, #1                 // Decrement the Way number
00526         BGE     Loop3
00527         SUBS    R7, R7, #1                 // Decrement the Set number
00528         BGE     Loop2
00529 Skip    ADD     R10, R10, #2               // Increment the cache number
00530         CMP     R3, R10
00531         BGT     Loop1
00532 
00533 Finished
00534         DSB
00535         POP    {R4-R11}
00536         BX     lr
00537 
00538 }
00539 #pragma pop
00540 
00541 
00542 /** \brief  Invalidate the whole D$
00543 
00544     DCISW. Invalidate by Set/Way
00545  */
00546 
00547 __STATIC_INLINE void __v7_inv_dcache_all(void) {
00548     __v7_all_cache(0);
00549 }
00550 
00551 /** \brief  Clean the whole D$
00552 
00553     DCCSW. Clean by Set/Way
00554  */
00555 
00556 __STATIC_INLINE void __v7_clean_dcache_all(void) {
00557     __v7_all_cache(1);
00558 }
00559 
00560 /** \brief  Clean and invalidate the whole D$
00561 
00562     DCCISW. Clean and Invalidate by Set/Way
00563  */
00564 
00565 __STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
00566     __v7_all_cache(2);
00567 }
00568 
00569 #include "core_ca_mmu.h"
00570 
00571 #elif (defined (__ICCARM__)) /*---------------- ICC Compiler ---------------------*/
00572 
00573 #define __inline inline
00574 
/* Disable IRQs and return the previous CPSR.I state (0x80 if IRQs were already disabled) */
00575 inline static uint32_t __disable_irq_iar() {
00576   int irq_dis = __get_CPSR() & 0x80;      // CPSR.I is bit 7, so the mask is 0x80
00577   __disable_irq();
00578   return irq_dis;
00579 }
00580 
/* ARMv7-A processor mode encodings for CPSR[4:0] */
00581 #define MODE_USR 0x10
00582 #define MODE_FIQ 0x11
00583 #define MODE_IRQ 0x12
00584 #define MODE_SVC 0x13
00585 #define MODE_MON 0x16
00586 #define MODE_ABT 0x17
00587 #define MODE_HYP 0x1A
00588 #define MODE_UND 0x1B
00589 #define MODE_SYS 0x1F
00590 
00591 /** \brief  Set Process Stack Pointer
00592 
00593     This function assigns the given value to the USR/SYS Stack Pointer (PSP).
00594 
00595     \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
00596  */
00597 // from rt_CMSIS.c
00598 __arm static inline void __set_PSP(uint32_t topOfProcStack) {
00599 __asm(
00600   "    ARM\n"
00601 //  "    PRESERVE8\n"
00602 
00603   "    BIC     R0, R0, #7  ;ensure stack is 8-byte aligned \n"
00604   "    MRS     R1, CPSR \n"
00605   "    CPS     #0x1F   ;no effect in USR mode \n"        // MODE_SYS
00606   "    MOV     SP, R0 \n"
00607   "    MSR     CPSR_c, R1  ;no effect in USR mode \n"
00608   "    ISB \n"
00609   "    BX      LR \n");
00610 }
00611 
00612 /** \brief  Set User Mode
00613 
00614     This function changes the processor state to User Mode
00615  */
00616 // from rt_CMSIS.c
00617 __arm static inline void __set_CPS_USR(void) {
00618 __asm(
00619   "    ARM \n"
00620 
00621   "    CPS  #0x10  \n"                  // MODE_USR
00622   "    BX   LR\n");
00623 }
00624 
00625 /** \brief  Set TTBR0
00626 
00627     This function assigns the given value to the Translation Table Base Register 0.
00628 
00629     \param [in]    ttbr0  Translation Table Base Register 0 value to set
00630  */
00631 // from mmu_Renesas_RZ_A1.c
00632 __STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
00633     __MCR(15, 0, ttbr0, 2, 0, 0);      // TTBR0: cp15, op1=0, c2, c0, op2=0
00634     __ISB();
00635 }
00636 
00637 /** \brief  Set DACR
00638 
00639     This function assigns the given value to the Domain Access Control Register.
00640 
00641     \param [in]    dacr   Domain Access Control Register value to set
00642  */
00643 // from mmu_Renesas_RZ_A1.c
00644 __STATIC_INLINE void __set_DACR(uint32_t dacr) {
00645     __MCR(15, 0, dacr, 3, 0, 0);      // DACR: cp15, op1=0, c3, c0, op2=0
00646     __ISB();
00647 }
00648 
00649 
00650 /******************************** Cache and BTAC enable  ****************************************************/
00651 /** \brief  Set SCTLR
00652 
00653     This function assigns the given value to the System Control Register.
00654 
00655     \param [in]    sctlr  System Control Register value to set
00656  */
00657 // from __enable_mmu()
00658 __STATIC_INLINE void __set_SCTLR(uint32_t sctlr) {
00659     __MCR(15, 0, sctlr, 1, 0, 0);      // SCTLR: cp15, op1=0, c1, c0, op2=0
00660 }
00661 
00662 /** \brief  Get SCTLR
00663 
00664     This function returns the value of the System Control Register.
00665 
00666     \return               System Control Register value
00667  */
00668 // from __enable_mmu()
00669 __STATIC_INLINE uint32_t __get_SCTLR() {
00670     uint32_t __regSCTLR = __MRC(15, 0, 1, 0, 0);
00671     return __regSCTLR;
00672 }
00673 
00674 /** \brief  Enable Caches
00675 
00676     Enable Caches
00677  */
00678 // from system_Renesas_RZ_A1.c
00679 __STATIC_INLINE void __enable_caches(void) {
00680     // Set I bit 12 (I cache) and C bit 2 (D cache) in SCTLR
00681     __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
00682 }
00683 
00684 /** \brief  Enable BTAC
00685 
00686     Enable BTAC
00687  */
00688 // from system_Renesas_RZ_A1.c
00689 __STATIC_INLINE void __enable_btac(void) {
00690     // Set Z bit 11 in SCTLR to enable branch prediction
00691     __set_SCTLR( __get_SCTLR() | (1 << 11));
00692     __ISB();
00693 }
00694 
00695 /** \brief  Enable MMU
00696 
00697     Enable MMU
00698  */
00699 // from system_Renesas_RZ_A1.c
00700 __STATIC_INLINE void __enable_mmu(void) {
00701     // Set M bit 0 to enable the MMU
00702     // Set AFE bit to enable simplified access permissions model
00703     // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
00704     __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
00705     __ISB();
00706 }
00707 
00708 /******************************** TLB maintenance operations ************************************************/
00709 /** \brief  Invalidate the whole tlb
00710 
00711     TLBIALL. Invalidate the whole tlb
00712  */
00713 // from system_Renesas_RZ_A1.c
00714 __STATIC_INLINE void __ca9u_inv_tlb_all(void) {
00715     uint32_t val = 0;
00716     __MCR(15, 0, val, 8, 7, 0);      // TLBIALL:  unified TLB invalidate
00717     __MCR(15, 0, val, 8, 6, 0);      // DTLBIALL: data TLB invalidate
00718     __MCR(15, 0, val, 8, 5, 0);      // ITLBIALL: instruction TLB invalidate
00719     __DSB();
00720     __ISB();
00721 }
00720 
00721 /******************************** BTB maintenance operations ************************************************/
00722 /** \brief  Invalidate entire branch predictor array
00723 
00724     BPIALL. Branch Predictor Invalidate All.
00725  */
00726 // from system_Renesas_RZ_A1.c
00727 __STATIC_INLINE void __v7_inv_btac(void) {
00728     uint32_t val = 0;
00729     __MCR(15, 0, val, 7, 5, 6);      // BPIALL: cp15, op1=0, c7, c5, op2=6
00730     __DSB();     //ensure completion of the invalidation
00731     __ISB();     //ensure instruction fetch path sees new state
00732 }
00733 
00734 
00735 /******************************** L1 cache operations ******************************************************/
00736 
00737 /** \brief  Invalidate the whole I$
00738 
00739     ICIALLU. Instruction Cache Invalidate All to PoU
00740  */
00741 // from system_Renesas_RZ_A1.c
00742 __STATIC_INLINE void __v7_inv_icache_all(void) {
00743     uint32_t val = 0;
00744     __MCR(15, 0, val, 7, 5, 0);      // ICIALLU: cp15, op1=0, c7, c5, op2=0
00745     __DSB();     //ensure completion of the invalidation
00746     __ISB();     //ensure instruction fetch path sees new I cache state
00747 }
00748 
00749 // from __v7_inv_dcache_all()
// op: 0 = DCISW (invalidate), 1 = DCCSW (clean), 2 = DCCISW (clean and invalidate) by Set/Way
00750 __arm static inline void __v7_all_cache(uint32_t op) {
00751 __asm(
00752     "        ARM \n"
00753 
00754     "        PUSH    {R4-R11} \n"
00755 
00756     "        MRC     p15, 1, R6, c0, c0, 1\n"      // Read CLIDR
00757     "        ANDS    R3, R6, #0x07000000\n"        // Extract coherency level
00758     "        MOV     R3, R3, LSR #23\n"            // Total cache levels << 1
00759     "        BEQ     Finished\n"                   // If 0, no need to clean
00760 
00761     "        MOV     R10, #0\n"                    // R10 holds current cache level << 1
00762     "Loop1:   ADD     R2, R10, R10, LSR #1\n"       // R2 holds cache "Set" position
00763     "        MOV     R1, R6, LSR R2 \n"            // Bottom 3 bits are the Cache-type for this level
00764     "        AND     R1, R1, #7 \n"                // Isolate those lower 3 bits
00765     "        CMP     R1, #2 \n"
00766     "        BLT     Skip \n"                      // No cache or only instruction cache at this level
00767 
00768     "        MCR     p15, 2, R10, c0, c0, 0 \n"    // Write the Cache Size selection register
00769     "        ISB \n"                               // ISB to sync the change to the CacheSizeID reg
00770     "        MRC     p15, 1, R1, c0, c0, 0 \n"     // Reads current Cache Size ID register
00771     "        AND     R2, R1, #7 \n"                // Extract the line length field
00772     "        ADD     R2, R2, #4 \n"                // Add 4 for the line length offset (log2 16 bytes)
00773     "        movw    R4, #0x3FF \n"
00774     "        ANDS    R4, R4, R1, LSR #3 \n"        // R4 is the max number on the way size (right aligned)
00775     "        CLZ     R5, R4 \n"                    // R5 is the bit position of the way size increment
00776     "        movw    R7, #0x7FFF \n"
00777     "        ANDS    R7, R7, R1, LSR #13 \n"       // R7 is the max number of the index size (right aligned)
00778 
00779     "Loop2:   MOV     R9, R4 \n"                    // R9 working copy of the max way size (right aligned)
00780 
00781     "Loop3:   ORR     R11, R10, R9, LSL R5 \n"      // Factor in the Way number and cache number into R11
00782     "        ORR     R11, R11, R7, LSL R2 \n"      // Factor in the Set number
00783     "        CMP     R0, #0 \n"
00784     "        BNE     Dccsw \n"
00785     "        MCR     p15, 0, R11, c7, c6, 2 \n"    // DCISW. Invalidate by Set/Way
00786     "        B       cont \n"
00787     "Dccsw:   CMP     R0, #1 \n"
00788     "        BNE     Dccisw \n"
00789     "        MCR     p15, 0, R11, c7, c10, 2 \n"   // DCCSW. Clean by Set/Way
00790     "        B       cont \n"
00791     "Dccisw:  MCR     p15, 0, R11, c7, c14, 2 \n"   // DCCISW. Clean and Invalidate by Set/Way
00792     "cont:    SUBS    R9, R9, #1 \n"                // Decrement the Way number
00793     "        BGE     Loop3 \n"
00794     "        SUBS    R7, R7, #1 \n"                // Decrement the Set number
00795     "        BGE     Loop2 \n"
00796     "Skip:    ADD     R10, R10, #2 \n"              // Increment the cache number
00797     "        CMP     R3, R10 \n"
00798     "        BGT     Loop1 \n"
00799 
00800     "Finished: \n"
00801     "        DSB \n"
00802     "        POP    {R4-R11} \n"
00803     "        BX     lr \n" );
00804 }
00805 
00806 /** \brief  Invalidate the whole D$
00807 
00808     DCISW. Invalidate by Set/Way
00809  */
00810 // from system_Renesas_RZ_A1.c
00811 __STATIC_INLINE void __v7_inv_dcache_all(void) {
00812     __v7_all_cache(0);
00813 }
00814 #include "core_ca_mmu.h"
00815 
00816 #elif (defined (__GNUC__)) /*------------------ GNU Compiler ---------------------*/
00817 /* GNU gcc specific functions */
00818 
/* ARMv7-A processor mode encodings for CPSR[4:0] */
00819 #define MODE_USR 0x10
00820 #define MODE_FIQ 0x11
00821 #define MODE_IRQ 0x12
00822 #define MODE_SVC 0x13
00823 #define MODE_MON 0x16
00824 #define MODE_ABT 0x17
00825 #define MODE_HYP 0x1A
00826 #define MODE_UND 0x1B
00827 #define MODE_SYS 0x1F
00828 
00829 
/** \brief  Enable IRQ Interrupts

  This function enables IRQ interrupts by clearing the I-bit in the CPSR.
  Can only be executed in Privileged modes.
 */
00830 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_irq(void)
00831 {
00832     __ASM volatile ("cpsie i");
00833 }
00834 
00835 /** \brief  Disable IRQ Interrupts
00836 
00837   This function disables IRQ interrupts by setting the I-bit in the CPSR.
00838   Can only be executed in Privileged modes.
00839 
00840   \return  previous CPSR.I state: 0x80 if IRQs were already disabled, 0 otherwise
 */
00840 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __disable_irq(void)
00841 {
00842     uint32_t result;
00843 
00844     __ASM volatile ("mrs %0, cpsr" : "=r" (result));
00845     __ASM volatile ("cpsid i");
00846     return(result & 0x80);
00847 }
00848 
00849 
00850 /** \brief  Get APSR Register
00851 
00852     This function returns the content of the APSR Register.
00853 
00854     \return               APSR Register value
00855  */
00856 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_APSR(void)
00857 {
00858 #if 1
00859   register uint32_t __regAPSR;
00860   __ASM volatile ("mrs %0, apsr" : "=r" (__regAPSR) );
00861 #else
00862   register uint32_t __regAPSR          __ASM("apsr");
00863 #endif
00864   return(__regAPSR);
00865 }
00866 
00867 
00868 /** \brief  Get CPSR Register
00869 
00870     This function returns the content of the CPSR Register.
00871 
00872     \return               CPSR Register value
00873  */
00874 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPSR(void)
00875 {
00876 #if 1
00877   register uint32_t __regCPSR;
00878   __ASM volatile ("mrs %0, cpsr" : "=r" (__regCPSR));
00879 #else
00880   register uint32_t __regCPSR          __ASM("cpsr");
00881 #endif
00882   return(__regCPSR);
00883 }
00884 
00885 #if 0
/* NOTE(review): compiled out -- relies on binding a variable to "sp" via
   __ASM("sp"), which this GCC path does not use; kept for reference only. */
00886 /** \brief  Set Stack Pointer
00887 
00888     This function assigns the given value to the current stack pointer.
00889 
00890     \param [in]    topOfStack  Stack Pointer value to set
00891  */
00892 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SP(uint32_t topOfStack)
00893 {
00894     register uint32_t __regSP       __ASM("sp");
00895     __regSP = topOfStack;
00896 }
00897 #endif
00898 
00899 /** \brief  Get link register
00900 
00901     This function returns the value of the link register
00902 
00903     \return    Value of link register
00904  */
00905 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_LR(void)
00906 {
  /* Explicit register variable bound to LR; relies on always_inline so the
     value observed is the caller's LR -- TODO confirm this holds at all
     optimization levels. */
00907   register uint32_t __reglr         __ASM("lr");
00908   return(__reglr);
00909 }
00910 
00911 #if 0
/* NOTE(review): compiled out -- same named-register ("lr") technique as the
   disabled __set_SP above; kept for reference only. */
00912 /** \brief  Set link register
00913 
00914     This function sets the value of the link register
00915 
00916     \param [in]    lr  LR value to set
00917  */
00918 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_LR(uint32_t lr)
00919 {
00920   register uint32_t __reglr         __ASM("lr");
00921   __reglr = lr;
00922 }
00923 #endif
00924 
00925 /** \brief  Set Process Stack Pointer
00926 
00927     This function assigns the given value to the USR/SYS Stack Pointer (PSP).
00928 
00929     \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
00930  */
00931 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
00932 {
  /* Switch to SYS mode so the SP write targets the banked USR/SYS stack
     pointer, then restore the original mode saved in R1.
     NOTE(review): the asm reads R0 but never references topOfProcStack as
     an operand -- it assumes the argument arrives in R0 (AAPCS). TODO
     confirm this assumption survives inlining/optimization. */
00933     __asm__ volatile (
00934     ".ARM;"
00935     ".eabi_attribute Tag_ABI_align8_preserved,1;"
00936 
00937     "BIC     R0, R0, #7;" /* ;ensure stack is 8-byte aligned */
00938     "MRS     R1, CPSR;"
00939     "CPS     %0;"         /* ;no effect in USR mode */
00940     "MOV     SP, R0;"
00941     "MSR     CPSR_c, R1;" /* ;no effect in USR mode */
00942     "ISB;"
00943     //"BX      LR;"
00944     :
00945     : "i"(MODE_SYS)
00946     : "r0", "r1");
00947     return;
00948 }
00949 
00950 /** \brief  Set User Mode
00951 
00952     This function changes the processor state to User Mode
00953  */
00954 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CPS_USR(void)
00955 {
  /* One-way transition: once in USR mode the CPS instruction no longer has
     any effect (mode changes require a privileged mode). */
00956     __asm__ volatile (
00957     ".ARM;"
00958 
00959     "CPS  %0;"
00960     //"BX   LR;"
00961     :
00962     : "i"(MODE_USR)
00963     : );
00964     return;
00965 }
00966 
00967 
00968 /** \brief  Enable FIQ
00969 
00970     This function enables FIQ interrupts by clearing the F-bit in the CPSR.
00971     Can only be executed in Privileged modes.
00972  */
/* Function-like macro: expands to a statement, not an expression. */
00973 #define __enable_fault_irq()                __asm__ volatile ("cpsie f")
00974 
00975 
00976 /** \brief  Disable FIQ
00977 
00978     This function disables FIQ interrupts by setting the F-bit in the CPSR.
00979     Can only be executed in Privileged modes.
00980  */
/* Function-like macro: expands to a statement, not an expression. */
00981 #define __disable_fault_irq()               __asm__ volatile ("cpsid f")
00982 
00983 
00984 /** \brief  Get FPSCR
00985 
00986     This function returns the current value of the Floating Point Status/Control register.
00987 
00988     \return               Floating Point Status/Control register value
00989  */
00990 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPSCR(void)
00991 {
00992 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
00993 #if 1
00994     uint32_t result;
00995 
00996     __ASM volatile ("vmrs %0, fpscr" : "=r" (result) );
00997     return (result);
00998 #else
00999   register uint32_t __regfpscr         __ASM("fpscr");
01000   return(__regfpscr);
01001 #endif
01002 #else
   /* No FPU configured for this build: report a zero FPSCR. */
01003    return(0);
01004 #endif
01005 }
01006 
01007 
01008 /** \brief  Set FPSCR
01009 
01010     This function assigns the given value to the Floating Point Status/Control register.
01011 
01012     \param [in]    fpscr  Floating Point Status/Control value to set
01013  */
01014 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
01015 {
  /* Write is silently dropped when no FPU is configured for this build. */
01016 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
01017 #if 1
01018     __ASM volatile ("vmsr fpscr, %0" : : "r" (fpscr) );
01019 #else
01020   register uint32_t __regfpscr         __ASM("fpscr");
01021   __regfpscr = (fpscr);
01022 #endif
01023 #endif
01024 }
01025 
01026 /** \brief  Get FPEXC
01027 
01028     This function returns the current value of the Floating Point Exception Control register.
01029 
01030     \return               Floating Point Exception Control register value
01031  */
01032 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPEXC(void)
01033 {
  /* NOTE(review): guarded only by __FPU_PRESENT, whereas the FPSCR
     accessors above also check __FPU_USED -- confirm the asymmetry is
     intentional. */
01034 #if (__FPU_PRESENT == 1)
01035 #if 1
01036     uint32_t result;
01037 
01038     __ASM volatile ("vmrs %0, fpexc" : "=r" (result));
01039     return (result);
01040 #else
01041   register uint32_t __regfpexc         __ASM("fpexc");
01042   return(__regfpexc);
01043 #endif
01044 #else
   /* No FPU present: report zero. */
01045    return(0);
01046 #endif
01047 }
01048 
01049 
01050 /** \brief  Set FPEXC
01051 
01052     This function assigns the given value to the Floating Point Exception Control register.
01053 
01054     \param [in]    fpexc  Floating Point Exception Control value to set
01055  */
01056 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
01057 {
  /* Write is silently dropped when no FPU is present. */
01058 #if (__FPU_PRESENT == 1)
01059 #if 1
01060     __ASM volatile ("vmsr fpexc, %0" : : "r" (fpexc));
01061 #else
01062   register uint32_t __regfpexc         __ASM("fpexc");
01063   __regfpexc = (fpexc);
01064 #endif
01065 #endif
01066 }
01067 
01068 /** \brief  Get CPACR
01069 
01070     This function returns the current value of the Coprocessor Access Control register.
01071 
01072     \return               Coprocessor Access Control register value
01073  */
01074 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPACR(void)
01075 {
01076 #if 1
    /* CPACR read via MRC p15,0,c1,c0,2; #else named-register form unused. */
01077     register uint32_t __regCPACR;
01078     __ASM volatile ("mrc p15, 0, %0, c1, c0, 2" : "=r" (__regCPACR));
01079 #else
01080     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
01081 #endif
01082     return __regCPACR;
01083 }
01084 
01085 /** \brief  Set CPACR
01086 
01087     This function assigns the given value to the Coprocessor Access Control register.
01088 
01089     \param [in]    cpacr  Coprocessor Access Control value to set
01090  */
01091 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CPACR(uint32_t cpacr)
01092 {
01093 #if 1
01094     __ASM volatile ("mcr p15, 0, %0, c1, c0, 2" : : "r" (cpacr));
01095 #else
01096     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
01097     __regCPACR = cpacr;
01098 #endif
01099     __ISB();     //ensure subsequent instructions see the new access permissions
01100 }
01101 
01102 /** \brief  Get CBAR
01103 
01104     This function returns the value of the Configuration Base Address register.
01105 
01106     \return               Configuration Base Address register value
01107  */
01108 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CBAR(void) {
01109 #if 1
    /* CBAR read via MRC p15,4,c15,c0,0; #else named-register form unused. */
01110     register uint32_t __regCBAR;
01111     __ASM volatile ("mrc p15, 4, %0, c15, c0, 0" : "=r" (__regCBAR));
01112 #else
01113     register uint32_t __regCBAR         __ASM("cp15:4:c15:c0:0");
01114 #endif
01115     return(__regCBAR);
01116 }
01117 
01118 /** \brief  Get TTBR0
01119 
01120     This function returns the value of the Translation Table Base Register 0.
01121 
01122     \return               Translation Table Base Register 0 value
01123  */
01124 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_TTBR0(void) {
01125 #if 1
    /* TTBR0 read via MRC p15,0,c2,c0,0; #else named-register form unused. */
01126     register uint32_t __regTTBR0;
01127     __ASM volatile ("mrc p15, 0, %0, c2, c0, 0" : "=r" (__regTTBR0));
01128 #else
01129     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
01130 #endif
01131     return(__regTTBR0);
01132 }
01133 
01134 /** \brief  Set TTBR0
01135 
01136     This function assigns the given value to the Translation Table Base Register 0.
01137 
01138     \param [in]    ttbr0  Translation Table Base Register 0 value to set
01139  */
01140 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
01141 #if 1
01142     __ASM volatile ("mcr p15, 0, %0, c2, c0, 0" : : "r" (ttbr0));
01143 #else
01144     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
01145     __regTTBR0 = ttbr0;
01146 #endif
01147     __ISB();     //ensure subsequent instructions use the new translation table base
01148 }
01149 
01150 /** \brief  Get DACR
01151 
01152     This function returns the value of the Domain Access Control Register.
01153 
01154     \return               Domain Access Control Register value
01155  */
01156 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_DACR(void) {
01157 #if 1
    /* DACR read via MRC p15,0,c3,c0,0; #else named-register form unused. */
01158     register uint32_t __regDACR;
01159     __ASM volatile ("mrc p15, 0, %0, c3, c0, 0" : "=r" (__regDACR));
01160 #else
01161     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
01162 #endif
01163     return(__regDACR);
01164 }
01165 
01166 /** \brief  Set DACR
01167 
01168     This function assigns the given value to the Domain Access Control Register.
01169 
01170     \param [in]    dacr   Domain Access Control Register value to set
01171  */
01172 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_DACR(uint32_t dacr) {
01173 #if 1
01174     __ASM volatile ("mcr p15, 0, %0, c3, c0, 0" : : "r" (dacr));
01175 #else
01176     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
01177     __regDACR = dacr;
01178 #endif
01179     __ISB();     //ensure subsequent accesses use the new domain permissions
01180 }
01181 
01182 /******************************** Cache and BTAC enable  ****************************************************/
01183 
01184 /** \brief  Set SCTLR
01185 
01186     This function assigns the given value to the System Control Register.
01187 
01188     \param [in]    sctlr  System Control Register value to set
01189  */
01190 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
01191 {
  /* No barrier issued here; callers add __ISB() where the update must be
     visible before the next instruction (see cache/BTAC/MMU helpers below). */
01192 #if 1
01193     __ASM volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r" (sctlr));
01194 #else
01195     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
01196     __regSCTLR = sctlr;
01197 #endif
01198 }
01199 
01200 /** \brief  Get SCTLR
01201 
01202     This function returns the value of the System Control Register.
01203 
01204     \return               System Control Register value
01205  */
01206 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_SCTLR(void) {
01207 #if 1
    /* SCTLR read via MRC p15,0,c1,c0,0; #else named-register form unused. */
01208     register uint32_t __regSCTLR;
01209     __ASM volatile ("mrc p15, 0, %0, c1, c0, 0" : "=r" (__regSCTLR));
01210 #else
01211     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
01212 #endif
01213     return(__regSCTLR);
01214 }
01215 
01216 /** \brief  Enable Caches
01217 
01218     Enable Caches
01219  */
01220 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_caches(void) {
01221     // Set I bit 12 to enable I Cache
01222     // Set C bit  2 to enable D Cache
    // NOTE(review): unlike __disable_caches(), no __ISB() follows this
    // write -- confirm whether a barrier is required here.
01223     __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
01224 }
01225 
01226 /** \brief  Disable Caches
01227 
01228     Disable Caches
01229  */
01230 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_caches(void) {
01231     // Clear I bit 12 to disable I Cache
01232     // Clear C bit  2 to disable D Cache
01233     __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
01234     __ISB();     //ensure subsequent instructions see the caches disabled
01235 }
01236 
01237 /** \brief  Enable BTAC
01238 
01239     Enable BTAC
01240  */
01241 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_btac(void) {
01242     // Set Z bit 11 to enable branch prediction
01243     __set_SCTLR( __get_SCTLR() | (1 << 11));
01244     __ISB();     //ensure subsequent instructions see branch prediction enabled
01245 }
01246 
01247 /** \brief  Disable BTAC
01248 
01249     Disable BTAC
01250  */
01251 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_btac(void) {
01252     // Clear Z bit 11 to disable branch prediction
    // NOTE(review): unlike __enable_btac(), no __ISB() follows this write --
    // confirm whether a barrier is required here.
01253     __set_SCTLR( __get_SCTLR() & ~(1 << 11));
01254 }
01255 
01256 
01257 /** \brief  Enable MMU
01258 
01259     Enable MMU
01260  */
01261 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_mmu(void) {
01262     // Set M bit 0 to enable the MMU
01263     // Set AFE bit to enable simplified access permissions model
01264     // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
    // SCTLR bits touched: M=bit0, A=bit1, TRE=bit28, AFE=bit29
01265     __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
01266     __ISB();
01267 }
01268 
01269 /** \brief  Disable MMU
01270 
01271     Disable MMU
01272  */
01273 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_mmu(void) {
01274     // Clear M bit 0 to disable the MMU
01275     __set_SCTLR( __get_SCTLR() & ~1);
01276     __ISB();     //ensure subsequent instructions execute with the MMU off
01277 }
01278 
01279 /******************************** TLB maintenance operations ************************************************/
01280 /** \brief  Invalidate the whole tlb
01281 
01282     TLBIALL. Invalidate the whole tlb
01283  */
01284 
01285 __attribute__( ( always_inline ) ) __STATIC_INLINE void __ca9u_inv_tlb_all(void) {
01286 #if 1
    /* Write to TLBIALL (c8,c7,0); the data value appears to be ignored, so
       zero is written by convention -- TODO confirm against the ARM ARM. */
01287     __ASM volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
01288 #else
01289     register uint32_t __TLBIALL         __ASM("cp15:0:c8:c7:0");
01290     __TLBIALL = 0;
01291 #endif
01292     __DSB();     //ensure completion of the invalidation
01293     __ISB();     //ensure subsequent instructions see the invalidated TLB
01294 }
01295 
01296 /******************************** BTB maintenance operations ************************************************/
01297 /** \brief  Invalidate entire branch predictor array
01298 
01299     BPIALL. Branch Predictor Invalidate All.
01300  */
01301 
01302 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_btac(void) {
01303 #if 1
    /* Write to BPIALL (c7,c5,6); zero written by convention. */
01304     __ASM volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
01305 #else
01306     register uint32_t __BPIALL          __ASM("cp15:0:c7:c5:6");
01307     __BPIALL  = 0;
01308 #endif
01309     __DSB();     //ensure completion of the invalidation
01310     __ISB();     //ensure instruction fetch path sees new state
01311 }
01312 
01313 
01314 /******************************** L1 cache operations ******************************************************/
01315 
01316 /** \brief  Invalidate the whole I$
01317 
01318     ICIALLU. Instruction Cache Invalidate All to PoU
01319  */
01320 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_icache_all(void) {
01321 #if 1
    /* Write to ICIALLU (c7,c5,0); zero written by convention. */
01322     __ASM volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));
01323 #else
01324     register uint32_t __ICIALLU         __ASM("cp15:0:c7:c5:0");
01325     __ICIALLU = 0;
01326 #endif
01327     __DSB();     //ensure completion of the invalidation
01328     __ISB();     //ensure instruction fetch path sees new I cache state
01329 }
01330 
01331 /** \brief  Clean D$ by MVA
01332 
01333     DCCMVAC. Data cache clean by MVA to PoC
01334 
    \param [in] va  Virtual address within the cache line to clean
                    (passed straight through as the MVA; any cache-line
                    alignment is presumably the caller's concern -- TODO
                    confirm)
01335  */
01336 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
01337 #if 1
01338     __ASM volatile ("mcr p15, 0, %0, c7, c10, 1" : : "r" ((uint32_t)va));
01339 #else
01340     register uint32_t __DCCMVAC         __ASM("cp15:0:c7:c10:1");
01341     __DCCMVAC = (uint32_t)va;
01342 #endif
01343     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
01344 }
01344 
01345 /** \brief  Invalidate D$ by MVA
01346 
01347     DCIMVAC. Data cache invalidate by MVA to PoC
01348 
    \param [in] va  Virtual address within the cache line to invalidate

    NOTE(review): invalidation discards the line without writing it back --
    callers must have cleaned first if dirty data matters (confirm intent).
01349  */
01350 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
01351 #if 1
01352     __ASM volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" ((uint32_t)va));
01353 #else
01354     register uint32_t __DCIMVAC         __ASM("cp15:0:c7:c6:1");
01355     __DCIMVAC = (uint32_t)va;
01356 #endif
01357     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
01358 }
01358 
01359 /** \brief  Clean and Invalidate D$ by MVA
01360 
01361     DCCIMVAC. Data cache clean and invalidate by MVA to PoC
01362 
    \param [in] va  Virtual address within the cache line to clean and
                    invalidate
01363  */
01364 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
01365 #if 1
01366     __ASM volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" ((uint32_t)va));
01367 #else
01368     register uint32_t __DCCIMVAC        __ASM("cp15:0:c7:c14:1");
01369     __DCCIMVAC = (uint32_t)va;
01370 #endif
01371     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
01372 }
01372 
01373 /** \brief  Clean and Invalidate the entire data or unified cache
01374 
01375     Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.

    \param [in] op  0 = invalidate, 1 = clean, 2 = clean & invalidate
                    (as used by the __v7_*_dcache_all wrappers below)
01376  */
01377 extern void __v7_all_cache(uint32_t op);
01378 
01379 
01380 /** \brief  Invalidate the whole D$
01381 
01382     DCISW. Invalidate by Set/Way
01383  */
01384 
01385 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_all(void) {
01386     __v7_all_cache(0);  // op 0 = invalidate entire data/unified cache
01387 }
01388 
01389 /** \brief  Clean the whole D$
01390 
01391     DCCSW. Clean by Set/Way
01392  */
01393 
01394 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_all(void) {
01395     __v7_all_cache(1);  // op 1 = clean entire data/unified cache
01396 }
01397 
01398 /** \brief  Clean and invalidate the whole D$
01399 
01400     DCCISW. Clean and Invalidate by Set/Way
01401  */
01402 
01403 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
01404     __v7_all_cache(2);  // op 2 = clean & invalidate entire data/unified cache
01405 }
01406 
01407 #include "core_ca_mmu.h"
01408 
01409 #elif (defined (__TASKING__)) /*--------------- TASKING Compiler -----------------*/
01410 
01411 #error TASKING Compiler support not implemented for Cortex-A
01412 
01413 #endif
01414 
01415 /*@} end of CMSIS_Core_RegAccFunctions */
01416 
01417 
01418 #endif /* __CORE_CAFUNC_H__ */
01419