This is the final version of the Mini Gateway for Automation and Security, designed for the Renesas GR-PEACH Design Contest.

Dependencies:   GR-PEACH_video GraphicsFramework HTTPServer R_BSP mbed-rpc mbed-rtos Socket lwip-eth lwip-sys lwip FATFileSystem

Fork of mbed-os-example-mbed5-blinky by mbed-os-examples

Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers core_caFunc.h Source File

core_caFunc.h

Go to the documentation of this file.
00001 /**************************************************************************//**
00002  * @file     core_caFunc.h
00003  * @brief    CMSIS Cortex-A Core Function Access Header File
00004  * @version  V3.10
00005  * @date     30 Oct 2013
00006  *
00007  * @note
00008  *
00009  ******************************************************************************/
00010 /* Copyright (c) 2009 - 2013 ARM LIMITED
00011 
00012    All rights reserved.
00013    Redistribution and use in source and binary forms, with or without
00014    modification, are permitted provided that the following conditions are met:
00015    - Redistributions of source code must retain the above copyright
00016      notice, this list of conditions and the following disclaimer.
00017    - Redistributions in binary form must reproduce the above copyright
00018      notice, this list of conditions and the following disclaimer in the
00019      documentation and/or other materials provided with the distribution.
00020    - Neither the name of ARM nor the names of its contributors may be used
00021      to endorse or promote products derived from this software without
00022      specific prior written permission.
00023    *
00024    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
00025    AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
00026    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
00027    ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
00028    LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
00029    CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
00030    SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
00031    INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
00032    CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
00033    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
00034    POSSIBILITY OF SUCH DAMAGE.
00035    ---------------------------------------------------------------------------*/
00036 
00037 
00038 #ifndef __CORE_CAFUNC_H__
00039 #define __CORE_CAFUNC_H__
00040 
00041 
00042 /* ###########################  Core Function Access  ########################### */
00043 /** \ingroup  CMSIS_Core_FunctionInterface
00044     \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
00045   @{
00046  */
00047 
#if   defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
/* ARM armcc specific functions */

#if (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif

/* Processor mode encodings for the CPSR.M[4:0] field (ARMv7-A). */
#define MODE_USR 0x10 /* User */
#define MODE_FIQ 0x11 /* FIQ */
#define MODE_IRQ 0x12 /* IRQ */
#define MODE_SVC 0x13 /* Supervisor */
#define MODE_MON 0x16 /* Monitor (Security Extensions) */
#define MODE_ABT 0x17 /* Abort */
#define MODE_HYP 0x1A /* Hyp (Virtualization Extensions) */
#define MODE_UND 0x1B /* Undefined */
#define MODE_SYS 0x1F /* System */
00064 
/** \brief  Get APSR Register

    This function returns the content of the APSR Register.

    \return               APSR Register value
 */
__STATIC_INLINE uint32_t __get_APSR(void)
{
  /* armcc named-register variable: reads APSR directly, no explicit asm needed. */
  register uint32_t __regAPSR          __ASM("apsr");
  return(__regAPSR);
}


/** \brief  Get CPSR Register

    This function returns the content of the CPSR Register.

    \return               CPSR Register value
 */
__STATIC_INLINE uint32_t __get_CPSR(void)
{
  register uint32_t __regCPSR          __ASM("cpsr");
  return(__regCPSR);
}

/** \brief  Set Stack Pointer

    This function assigns the given value to the current stack pointer.

    \param [in]    topOfStack  Stack Pointer value to set
 */
/* File-scope named-register variable mapped onto SP (armcc extension). */
register uint32_t __regSP              __ASM("sp");
__STATIC_INLINE void __set_SP(uint32_t topOfStack)
{
    __regSP = topOfStack;
}


/** \brief  Get link register

    This function returns the value of the link register

    \return    Value of link register
 */
/* File-scope named-register variable mapped onto LR; shared with __set_LR below. */
register uint32_t __reglr         __ASM("lr");
__STATIC_INLINE uint32_t __get_LR(void)
{
  return(__reglr);
}

/** \brief  Set link register

    This function sets the value of the link register

    \param [in]    lr  LR value to set
 */
__STATIC_INLINE void __set_LR(uint32_t lr)
{
  __reglr = lr;
}
00125 
/** \brief  Set Process Stack Pointer

    This function assigns the given value to the USR/SYS Stack Pointer (PSP).

    Embedded assembly: saves CPSR, switches to System mode (shares the
    USR stack pointer), loads SP, then restores the original mode.

    \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
 */
__STATIC_ASM void __set_PSP(uint32_t topOfProcStack)
{
    ARM
    PRESERVE8

    BIC     R0, R0, #7  ;ensure stack is 8-byte aligned
    MRS     R1, CPSR
    CPS     #MODE_SYS   ;no effect in USR mode
    MOV     SP, R0
    MSR     CPSR_c, R1  ;no effect in USR mode
    ISB
    BX      LR

}

/** \brief  Set User Mode

    This function changes the processor state to User Mode.
    Irreversible from User mode itself (CPS is a privileged operation).
 */
__STATIC_ASM void __set_CPS_USR(void)
{
    ARM

    CPS  #MODE_USR
    BX   LR
}
00158 
00159 
/** \brief  Enable FIQ

    This function enables FIQ interrupts by clearing the F-bit in the CPSR.
    Can only be executed in Privileged modes.

    Alias onto the armcc compiler intrinsic __enable_fiq, keeping the
    CMSIS fault-IRQ naming used by Cortex-M code.
 */
#define __enable_fault_irq                __enable_fiq


/** \brief  Disable FIQ

    This function disables FIQ interrupts by setting the F-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
#define __disable_fault_irq               __disable_fiq
00174 
00175 
/** \brief  Get FPSCR

    This function returns the current value of the Floating Point Status/Control register.

    \return               Floating Point Status/Control register value
                          (0 when no FPU is present/used)
 */
__STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
  register uint32_t __regfpscr         __ASM("fpscr");
  return(__regfpscr);
#else
   return(0);
#endif
}


/** \brief  Set FPSCR

    This function assigns the given value to the Floating Point Status/Control register.
    No-op when no FPU is present/used.

    \param [in]    fpscr  Floating Point Status/Control value to set
 */
__STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
  register uint32_t __regfpscr         __ASM("fpscr");
  __regfpscr = (fpscr);
#endif
}

/** \brief  Get FPEXC

    This function returns the current value of the Floating Point Exception Control register.

    \return               Floating Point Exception Control register value
                          (0 when no FPU is present)
 */
__STATIC_INLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
  register uint32_t __regfpexc         __ASM("fpexc");
  return(__regfpexc);
#else
   return(0);
#endif
}


/** \brief  Set FPEXC

    This function assigns the given value to the Floating Point Exception Control register.
    No-op when no FPU is present.

    \param [in]    fpexc  Floating Point Exception Control value to set
 */
__STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
  register uint32_t __regfpexc         __ASM("fpexc");
  __regfpexc = (fpexc);
#endif
}
00237 
/** \brief  Get CPACR

    This function returns the current value of the Coprocessor Access Control register.

    \return               Coprocessor Access Control register value
 */
__STATIC_INLINE uint32_t __get_CPACR(void)
{
    /* armcc named-register variable mapped onto CP15 c1/c0 op2=2 (CPACR). */
    register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
    return __regCPACR;
}

/** \brief  Set CPACR

    This function assigns the given value to the Coprocessor Access Control register.

    \param [in]    cpacr  Coprocessor Access Control value to set
 */
__STATIC_INLINE void __set_CPACR(uint32_t cpacr)
{
    register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
    __regCPACR = cpacr;
    __ISB();    /* make the new coprocessor access rights visible to subsequent instructions */
}
00262 
00263 /** \brief  Get CBAR
00264 
00265     This function returns the value of the Configuration Base Address register.
00266 
00267     \return               Configuration Base Address register value
00268  */
00269 __STATIC_INLINE uint32_t __get_CBAR() {
00270     register uint32_t __regCBAR         __ASM("cp15:4:c15:c0:0");
00271     return(__regCBAR);
00272 }
00273 
00274 /** \brief  Get TTBR0
00275 
00276     This function returns the value of the Translation Table Base Register 0.
00277 
00278     \return               Translation Table Base Register 0 value
00279  */
00280 __STATIC_INLINE uint32_t __get_TTBR0() {
00281     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
00282     return(__regTTBR0);
00283 }
00284 
/** \brief  Set TTBR0

    This function assigns the given value to the Translation Table Base Register 0.

    \param [in]    ttbr0  Translation Table Base Register 0 value to set
 */
__STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
    register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
    __regTTBR0 = ttbr0;
    __ISB();    /* ensure the translation table base change takes effect before further fetches */
}
00296 
00297 /** \brief  Get DACR
00298 
00299     This function returns the value of the Domain Access Control Register.
00300 
00301     \return               Domain Access Control Register value
00302  */
00303 __STATIC_INLINE uint32_t __get_DACR() {
00304     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
00305     return(__regDACR);
00306 }
00307 
/** \brief  Set DACR

    This function assigns the given value to the Domain Access Control Register.

    \param [in]    dacr   Domain Access Control Register value to set
 */
__STATIC_INLINE void __set_DACR(uint32_t dacr) {
    register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
    __regDACR = dacr;
    __ISB();    /* ensure the new domain permissions are seen by subsequent accesses */
}
00319 
00320 /******************************** Cache and BTAC enable  ****************************************************/
00321 
/** \brief  Set SCTLR

    This function assigns the given value to the System Control Register.
    Note: callers are responsible for any required ISB after changing
    cache/MMU enable bits (see __enable_mmu et al. below).

    \param [in]    sctlr  System Control Register value to set
 */
__STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
{
    register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
    __regSCTLR = sctlr;
}
00333 
00334 /** \brief  Get SCTLR
00335 
00336     This function returns the value of the System Control Register.
00337 
00338     \return               System Control Register value
00339  */
00340 __STATIC_INLINE uint32_t __get_SCTLR() {
00341     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
00342     return(__regSCTLR);
00343 }
00344 
00345 /** \brief  Enable Caches
00346 
00347     Enable Caches
00348  */
00349 __STATIC_INLINE void __enable_caches(void) {
00350     // Set I bit 12 to enable I Cache
00351     // Set C bit  2 to enable D Cache
00352     __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
00353 }
00354 
/** \brief  Disable Caches

    Disable Caches
 */
__STATIC_INLINE void __disable_caches(void) {
    // Clear I bit 12 to disable I Cache
    // Clear C bit  2 to disable D Cache
    __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
    __ISB();    /* ensure instruction fetches observe the disabled caches */
}
00365 
/** \brief  Enable BTAC

    Enable the Branch Target Address Cache (branch prediction).
 */
__STATIC_INLINE void __enable_btac(void) {
    // Set Z bit 11 to enable branch prediction
    __set_SCTLR( __get_SCTLR() | (1 << 11));
    __ISB();    /* ensure subsequent fetches use branch prediction */
}
00375 
00376 /** \brief  Disable BTAC
00377 
00378     Disable BTAC
00379  */
00380 __STATIC_INLINE void __disable_btac(void) {
00381     // Clear Z bit 11 to disable branch prediction
00382     __set_SCTLR( __get_SCTLR() & ~(1 << 11));
00383 }
00384 
00385 
/** \brief  Enable MMU

    Enable MMU
 */
__STATIC_INLINE void __enable_mmu(void) {
    // Set M bit 0 to enable the MMU
    // Set AFE bit to enable simplified access permissions model
    // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
    __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
    __ISB();    /* ensure translation is active before the next instruction fetch */
}

/** \brief  Disable MMU

    Disable MMU
 */
__STATIC_INLINE void __disable_mmu(void) {
    // Clear M bit 0 to disable the MMU
    __set_SCTLR( __get_SCTLR() & ~1);
    __ISB();
}
00407 
/******************************** TLB maintenance operations ************************************************/
/** \brief  Invalidate the whole tlb

    TLBIALL. Invalidate the whole tlb
 */

__STATIC_INLINE void __ca9u_inv_tlb_all(void) {
    /* Write-only CP15 operation: any write to c8/c7 op2=0 invalidates the unified TLB. */
    register uint32_t __TLBIALL         __ASM("cp15:0:c8:c7:0");
    __TLBIALL = 0;
    __DSB();    /* ensure completion of the invalidation */
    __ISB();    /* ensure subsequent fetches use the new translations */
}

/******************************** BTB maintenance operations ************************************************/
/** \brief  Invalidate entire branch predictor array

    BPIALL. Branch Predictor Invalidate All.
 */

__STATIC_INLINE void __v7_inv_btac(void) {
    register uint32_t __BPIALL          __ASM("cp15:0:c7:c5:6");
    __BPIALL  = 0;
    __DSB();     //ensure completion of the invalidation
    __ISB();     //ensure instruction fetch path sees new state
}


/******************************** L1 cache operations ******************************************************/

/** \brief  Invalidate the whole I$

    ICIALLU. Instruction Cache Invalidate All to PoU
 */
__STATIC_INLINE void __v7_inv_icache_all(void) {
    register uint32_t __ICIALLU         __ASM("cp15:0:c7:c5:0");
    __ICIALLU = 0;
    __DSB();     //ensure completion of the invalidation
    __ISB();     //ensure instruction fetch path sees new I cache state
}

/** \brief  Clean D$ by MVA

    DCCMVAC. Data cache clean by MVA to PoC

    \param [in]    va  virtual address within the cache line to clean
 */
__STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
    register uint32_t __DCCMVAC         __ASM("cp15:0:c7:c10:1");
    __DCCMVAC = (uint32_t)va;
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief  Invalidate D$ by MVA

    DCIMVAC. Data cache invalidate by MVA to PoC

    \param [in]    va  virtual address within the cache line to invalidate
 */
__STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
    register uint32_t __DCIMVAC         __ASM("cp15:0:c7:c6:1");
    __DCIMVAC = (uint32_t)va;
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}

/** \brief  Clean and Invalidate D$ by MVA

    DCCIMVAC. Data cache clean and invalidate by MVA to PoC

    \param [in]    va  virtual address within the cache line to clean and invalidate
 */
__STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
    register uint32_t __DCCIMVAC        __ASM("cp15:0:c7:c14:1");
    __DCCIMVAC = (uint32_t)va;
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}
00477 
/** \brief  Clean and Invalidate the entire data or unified cache

    Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.

    Walks every cache level reported by CLIDR and performs a set/way
    operation on each line, selected by the op argument:
      op == 0 : DCISW  (invalidate)
      op == 1 : DCCSW  (clean)
      else    : DCCISW (clean and invalidate)
 */
#pragma push
#pragma arm
__STATIC_ASM void __v7_all_cache(uint32_t op) {
        ARM

        PUSH    {R4-R11}

        MRC     p15, 1, R6, c0, c0, 1      // Read CLIDR
        ANDS    R3, R6, #0x07000000        // Extract coherency level
        MOV     R3, R3, LSR #23            // Total cache levels << 1
        BEQ     Finished                   // If 0, no need to clean

        MOV     R10, #0                    // R10 holds current cache level << 1
Loop1   ADD     R2, R10, R10, LSR #1       // R2 holds cache "Set" position
        MOV     R1, R6, LSR R2             // Bottom 3 bits are the Cache-type for this level
        AND     R1, R1, #7                 // Isolate those lower 3 bits
        CMP     R1, #2
        BLT     Skip                       // No cache or only instruction cache at this level

        MCR     p15, 2, R10, c0, c0, 0     // Write the Cache Size selection register
        ISB                                // ISB to sync the change to the CacheSizeID reg
        MRC     p15, 1, R1, c0, c0, 0      // Reads current Cache Size ID register
        AND     R2, R1, #7                 // Extract the line length field
        ADD     R2, R2, #4                 // Add 4 for the line length offset (log2 16 bytes)
        LDR     R4, =0x3FF
        ANDS    R4, R4, R1, LSR #3         // R4 is the max number on the way size (right aligned)
        CLZ     R5, R4                     // R5 is the bit position of the way size increment
        LDR     R7, =0x7FFF
        ANDS    R7, R7, R1, LSR #13        // R7 is the max number of the index size (right aligned)

Loop2   MOV     R9, R4                     // R9 working copy of the max way size (right aligned)

Loop3   ORR     R11, R10, R9, LSL R5       // Factor in the Way number and cache number into R11
        ORR     R11, R11, R7, LSL R2       // Factor in the Set number
        CMP     R0, #0
        BNE     Dccsw
        MCR     p15, 0, R11, c7, c6, 2     // DCISW. Invalidate by Set/Way
        B       cont
Dccsw   CMP     R0, #1
        BNE     Dccisw
        MCR     p15, 0, R11, c7, c10, 2    // DCCSW. Clean by Set/Way
        B       cont
Dccisw  MCR     p15, 0, R11, c7, c14, 2    // DCCISW. Clean and Invalidate by Set/Way
cont    SUBS    R9, R9, #1                 // Decrement the Way number
        BGE     Loop3
        SUBS    R7, R7, #1                 // Decrement the Set number
        BGE     Loop2
Skip    ADD     R10, R10, #2               // Increment the cache number
        CMP     R3, R10
        BGT     Loop1

Finished
        DSB
        POP    {R4-R11}
        BX     lr

}
#pragma pop
00540 
00541 
/** \brief  Invalidate the whole D$

    DCISW. Invalidate by Set/Way
 */

__STATIC_INLINE void __v7_inv_dcache_all(void) {
    __v7_all_cache(0);    /* op 0 = invalidate only */
}

/** \brief  Clean the whole D$

    DCCSW. Clean by Set/Way
 */

__STATIC_INLINE void __v7_clean_dcache_all(void) {
    __v7_all_cache(1);    /* op 1 = clean only */
}

/** \brief  Clean and invalidate the whole D$

    DCCISW. Clean and Invalidate by Set/Way
 */

__STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
    __v7_all_cache(2);    /* op 2 = clean and invalidate */
}
00568 
00569 #include "core_ca_mmu.h"
00570 
#elif (defined (__ICCARM__)) /*---------------- ICC Compiler ---------------------*/

#define __inline inline

/* Disable IRQs and return the previous masked state (non-zero if IRQs
   were already disabled).  Bit 7 of the CPSR is the I (IRQ disable) flag. */
inline static uint32_t __disable_irq_iar() {
  int irq_dis = __get_CPSR() & 0x80;      // 7bit CPSR.I
  __disable_irq();
  return irq_dis;
}

/* Processor mode encodings for the CPSR.M[4:0] field (ARMv7-A). */
#define MODE_USR 0x10
#define MODE_FIQ 0x11
#define MODE_IRQ 0x12
#define MODE_SVC 0x13
#define MODE_MON 0x16
#define MODE_ABT 0x17
#define MODE_HYP 0x1A
#define MODE_UND 0x1B
#define MODE_SYS 0x1F
00590 
/** \brief  Set Process Stack Pointer

    This function assigns the given value to the USR/SYS Stack Pointer (PSP).

    IAR inline-asm version: switches to System mode (shares the USR stack
    pointer), loads SP, then restores the saved mode.

    \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
 */
// from rt_CMSIS.c
__arm static inline void __set_PSP(uint32_t topOfProcStack) {
__asm(
  "    ARM\n"
//  "    PRESERVE8\n"

  "    BIC     R0, R0, #7  ;ensure stack is 8-byte aligned \n"
  "    MRS     R1, CPSR \n"
  "    CPS     #0x1F   ;no effect in USR mode \n"        // MODE_SYS
  "    MOV     SP, R0 \n"
  "    MSR     CPSR_c, R1  ;no effect in USR mode \n"
  "    ISB \n"
  "    BX      LR \n");
}

/** \brief  Set User Mode

    This function changes the processor state to User Mode
 */
// from rt_CMSIS.c
__arm static inline void __set_CPS_USR(void) {
__asm(
  "    ARM \n"

  "    CPS  #0x10  \n"                  // MODE_USR
  "    BX   LR\n");
}
00624 
/** \brief  Set TTBR0

    This function assigns the given value to the Translation Table Base Register 0.

    \param [in]    ttbr0  Translation Table Base Register 0 value to set
 */
// from mmu_Renesas_RZ_A1.c
__STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
    /* CP15 c2/c0 op2=0 = TTBR0 */
    __MCR(15, 0, ttbr0, 2, 0, 0);      // reg to cp15
    __ISB();
}

/** \brief  Set DACR

    This function assigns the given value to the Domain Access Control Register.

    \param [in]    dacr   Domain Access Control Register value to set
 */
// from mmu_Renesas_RZ_A1.c
__STATIC_INLINE void __set_DACR(uint32_t dacr) {
    /* CP15 c3/c0 op2=0 = DACR */
    __MCR(15, 0, dacr, 3, 0, 0);      // reg to cp15
    __ISB();
}
00648 
00649 
/******************************** Cache and BTAC enable  ****************************************************/
/** \brief  Set SCTLR

    This function assigns the given value to the System Control Register.
    Callers perform any required ISB after changing cache/MMU enable bits.

    \param [in]    sctlr  System Control Register value to set
 */
// from __enable_mmu()
__STATIC_INLINE void __set_SCTLR(uint32_t sctlr) {
    /* CP15 c1/c0 op2=0 = SCTLR */
    __MCR(15, 0, sctlr, 1, 0, 0);      // reg to cp15
}
00661 
00662 /** \brief  Get SCTLR
00663 
00664     This function returns the value of the System Control Register.
00665 
00666     \return               System Control Register value
00667  */
00668 // from __enable_mmu()
00669 __STATIC_INLINE uint32_t __get_SCTLR() {
00670     uint32_t __regSCTLR = __MRC(15, 0, 1, 0, 0);
00671     return __regSCTLR;
00672 }
00673 
/** \brief  Enable Caches

    Enable the level-1 instruction (I, bit 12) and data (C, bit 2) caches.
 */
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __enable_caches(void) {
    __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
}

/** \brief  Enable BTAC

    Enable branch prediction (Z, bit 11).
 */
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __enable_btac(void) {
    __set_SCTLR( __get_SCTLR() | (1 << 11));
    __ISB();
}

/** \brief  Enable MMU

    Enable MMU
 */
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __enable_mmu(void) {
    // Set M bit 0 to enable the MMU
    // Set AFE bit to enable simplified access permissions model
    // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
    __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
    __ISB();    /* ensure translation is active before the next instruction fetch */
}
00705 
/******************************** TLB maintenance operations ************************************************/
/** \brief  Invalidate the whole tlb

    TLBIALL. Invalidate the whole tlb
 */
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __ca9u_inv_tlb_all(void) {
    uint32_t val = 0;
    /* c8/c7 op2=0, c8/c6 op2=0, c8/c5 op2=0: unified, data and instruction
       TLB invalidate-all operations respectively. */
    __MCR(15, 0, val, 8, 7, 0);      // reg to cp15
    __MCR(15, 0, val, 8, 6, 0);      // reg to cp15
    __MCR(15, 0, val, 8, 5, 0);      // reg to cp15
    __DSB();
    __ISB();
}

/******************************** BTB maintenance operations ************************************************/
/** \brief  Invalidate entire branch predictor array

    BPIALL. Branch Predictor Invalidate All.
 */
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __v7_inv_btac(void) {
    uint32_t val = 0;
    __MCR(15, 0, val, 7, 5, 6);      // reg to cp15
    __DSB();     //ensure completion of the invalidation
    __ISB();     //ensure instruction fetch path sees new state
}


/******************************** L1 cache operations ******************************************************/

/** \brief  Invalidate the whole I$

    ICIALLU. Instruction Cache Invalidate All to PoU
 */
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __v7_inv_icache_all(void) {
    uint32_t val = 0;
    __MCR(15, 0, val, 7, 5, 0);      // reg to cp15
    __DSB();     //ensure completion of the invalidation
    __ISB();     //ensure instruction fetch path sees new I cache state
}
00748 
// from __v7_inv_dcache_all()
/* Walk every cache level reported by CLIDR and apply a set/way maintenance
   operation to each line, selected by op:
     op == 0 : DCISW  (invalidate)
     op == 1 : DCCSW  (clean)
     else    : DCCISW (clean and invalidate) */
__arm static inline void __v7_all_cache(uint32_t op) {
__asm(
    "        ARM \n"

    "        PUSH    {R4-R11} \n"

    "        MRC     p15, 1, R6, c0, c0, 1\n"      // Read CLIDR
    "        ANDS    R3, R6, #0x07000000\n"        // Extract coherency level
    "        MOV     R3, R3, LSR #23\n"            // Total cache levels << 1
    "        BEQ     Finished\n"                   // If 0, no need to clean

    "        MOV     R10, #0\n"                    // R10 holds current cache level << 1
    "Loop1:   ADD     R2, R10, R10, LSR #1\n"       // R2 holds cache "Set" position
    "        MOV     R1, R6, LSR R2 \n"            // Bottom 3 bits are the Cache-type for this level
    "        AND     R1, R1, #7 \n"                // Isolate those lower 3 bits
    "        CMP     R1, #2 \n"
    "        BLT     Skip \n"                      // No cache or only instruction cache at this level

    "        MCR     p15, 2, R10, c0, c0, 0 \n"    // Write the Cache Size selection register
    "        ISB \n"                               // ISB to sync the change to the CacheSizeID reg
    "        MRC     p15, 1, R1, c0, c0, 0 \n"     // Reads current Cache Size ID register
    "        AND     R2, R1, #7 \n"                // Extract the line length field
    "        ADD     R2, R2, #4 \n"                // Add 4 for the line length offset (log2 16 bytes)
    "        movw    R4, #0x3FF \n"
    "        ANDS    R4, R4, R1, LSR #3 \n"        // R4 is the max number on the way size (right aligned)
    "        CLZ     R5, R4 \n"                    // R5 is the bit position of the way size increment
    "        movw    R7, #0x7FFF \n"
    "        ANDS    R7, R7, R1, LSR #13 \n"       // R7 is the max number of the index size (right aligned)

    "Loop2:   MOV     R9, R4 \n"                    // R9 working copy of the max way size (right aligned)

    "Loop3:   ORR     R11, R10, R9, LSL R5 \n"      // Factor in the Way number and cache number into R11
    "        ORR     R11, R11, R7, LSL R2 \n"      // Factor in the Set number
    "        CMP     R0, #0 \n"
    "        BNE     Dccsw \n"
    "        MCR     p15, 0, R11, c7, c6, 2 \n"    // DCISW. Invalidate by Set/Way
    "        B       cont \n"
    "Dccsw:   CMP     R0, #1 \n"
    "        BNE     Dccisw \n"
    "        MCR     p15, 0, R11, c7, c10, 2 \n"   // DCCSW. Clean by Set/Way
    "        B       cont \n"
    "Dccisw:  MCR     p15, 0, R11, c7, c14, 2 \n"   // DCCISW, Clean and Invalidate by Set/Way
    "cont:    SUBS    R9, R9, #1 \n"                // Decrement the Way number
    "        BGE     Loop3 \n"
    "        SUBS    R7, R7, #1 \n"                // Decrement the Set number
    "        BGE     Loop2 \n"
    "Skip:    ADD     R10, R10, #2 \n"              // increment the cache number
    "        CMP     R3, R10 \n"
    "        BGT     Loop1 \n"

    "Finished: \n"
    "        DSB \n"
    "        POP    {R4-R11} \n"
    "        BX     lr \n" );
}
00805 
/** \brief  Invalidate the whole D$

    DCISW. Invalidate by Set/Way
 */
// from system_Renesas_RZ_A1.c
__STATIC_INLINE void __v7_inv_dcache_all(void) {
    __v7_all_cache(0);    /* op 0 = invalidate only */
}
/** \brief  Clean the whole D$

    DCCSW. Clean by Set/Way
 */

__STATIC_INLINE void __v7_clean_dcache_all(void) {
    __v7_all_cache(1);    /* op 1 = clean only */
}

/** \brief  Clean and invalidate the whole D$

    DCCISW. Clean and Invalidate by Set/Way
 */

__STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
    __v7_all_cache(2);    /* op 2 = clean and invalidate */
}
/** \brief  Clean and Invalidate D$ by MVA

    DCCIMVAC. Data cache clean and invalidate by MVA to PoC

    \param [in]    va  virtual address within the cache line to clean and invalidate
 */
__STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
    /* CP15 c7/c14 op2=1 = DCCIMVAC */
    __MCR(15, 0, (uint32_t)va, 7, 14, 1);
    __DMB();
}
00839 
00840 #include "core_ca_mmu.h"
00841 
#elif (defined (__GNUC__)) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

/* Processor mode encodings for the CPSR.M[4:0] field (ARMv7-A). */
#define MODE_USR 0x10
#define MODE_FIQ 0x11
#define MODE_IRQ 0x12
#define MODE_SVC 0x13
#define MODE_MON 0x16
#define MODE_ABT 0x17
#define MODE_HYP 0x1A
#define MODE_UND 0x1B
#define MODE_SYS 0x1F


/** \brief  Enable IRQ Interrupts

  This function enables IRQ interrupts by clearing the I-bit in the CPSR.
  Can only be executed in Privileged modes.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_irq(void)
{
    __ASM volatile ("cpsie i");
}
00860 
/** \brief  Disable IRQ Interrupts

  This function disables IRQ interrupts by setting the I-bit in the CPSR.
  Can only be executed in Privileged modes.

  \return   previous state of the I-bit (0x80 if IRQs were already masked, 0 otherwise)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __disable_irq(void)
{
    uint32_t result;

    /* Read CPSR before masking so the prior I-bit state can be returned. */
    __ASM volatile ("mrs %0, cpsr" : "=r" (result));
    __ASM volatile ("cpsid i");
    return(result & 0x80);
}
00874 
00875 
/** \brief  Get APSR Register

    This function returns the content of the APSR Register.

    \return               APSR Register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_APSR(void)
{
#if 1
  /* Explicit MRS; the #else branch uses an armcc-style named-register variable. */
  register uint32_t __regAPSR;
  __ASM volatile ("mrs %0, apsr" : "=r" (__regAPSR) );
#else
  register uint32_t __regAPSR          __ASM("apsr");
#endif
  return(__regAPSR);
}


/** \brief  Get CPSR Register

    This function returns the content of the CPSR Register.

    \return               CPSR Register value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPSR(void)
{
#if 1
  register uint32_t __regCPSR;
  __ASM volatile ("mrs %0, cpsr" : "=r" (__regCPSR));
#else
  register uint32_t __regCPSR          __ASM("cpsr");
#endif
  return(__regCPSR);
}
00910 
#if 0
/** \brief  Set Stack Pointer

    This function assigns the given value to the current stack pointer.
    (Disabled: relies on the GCC explicit-register-variable extension
    and is not used by this port.)

    \param [in]    topOfStack  Stack Pointer value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SP(uint32_t topOfStack)
{
    register uint32_t __regSP       __ASM("sp");
    __regSP = topOfStack;
}
#endif
00924 
/** \brief  Get link register

    This function returns the value of the link register

    \return    Value of link register
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_LR(void)
{
  /* GCC explicit-register variable: reads LR directly.
     NOTE(review): the value observed is LR at the point of inlining. */
  register uint32_t __reglr         __ASM("lr");
  return(__reglr);
}
00936 
#if 0
/** \brief  Set link register

    This function sets the value of the link register.
    (Disabled: relies on the GCC explicit-register-variable extension
    and is not used by this port.)

    \param [in]    lr  LR value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_LR(uint32_t lr)
{
  register uint32_t __reglr         __ASM("lr");
  __reglr = lr;
}
#endif
00950 
/** \brief  Set Process Stack Pointer

    This function assigns the given value to the USR/SYS Stack Pointer (PSP).

    \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
{
    /* Temporarily switches to SYS mode (which shares SP with USR mode),
       loads the new stack pointer, then restores the previous mode.
       NOTE(review): assumes topOfProcStack arrives in R0 (AAPCS) since
       the asm names no input operand for it — confirm if the calling
       convention or inlining context changes. */
    __asm__ volatile (
    ".ARM;"
    ".eabi_attribute Tag_ABI_align8_preserved,1;"

    "BIC     R0, R0, #7;" /* ;ensure stack is 8-byte aligned */
    "MRS     R1, CPSR;"
    "CPS     %0;"         /* ;no effect in USR mode */
    "MOV     SP, R0;"
    "MSR     CPSR_c, R1;" /* ;no effect in USR mode */
    "ISB;"
    //"BX      LR;"
    :
    : "i"(MODE_SYS)
    : "r0", "r1");
    return;
}
00975 
/** \brief  Set User Mode

    This function changes the processor state to User Mode

    NOTE(review): this transition is one-way — User mode cannot switch
    itself back to a privileged mode.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CPS_USR(void)
{
    __asm__ volatile (
    ".ARM;"

    "CPS  %0;"
    //"BX   LR;"
    :
    : "i"(MODE_USR)
    : );
    return;
}
00992 
00993 
/** \brief  Enable FIQ

    This function enables FIQ interrupts by clearing the F-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
#define __enable_fault_irq()                __asm__ volatile ("cpsie f")


/** \brief  Disable FIQ

    This function disables FIQ interrupts by setting the F-bit in the CPSR.
    Can only be executed in Privileged modes.

    Note: unlike __disable_irq(), this does not return the previous state.
 */
#define __disable_fault_irq()               __asm__ volatile ("cpsid f")
01008 
01009 
01010 /** \brief  Get FPSCR
01011 
01012     This function returns the current value of the Floating Point Status/Control register.
01013 
01014     \return               Floating Point Status/Control register value
01015  */
01016 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPSCR(void)
01017 {
01018 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
01019 #if 1
01020     uint32_t result;
01021 
01022     __ASM volatile ("vmrs %0, fpscr" : "=r" (result) );
01023     return (result);
01024 #else
01025   register uint32_t __regfpscr         __ASM("fpscr");
01026   return(__regfpscr);
01027 #endif
01028 #else
01029    return(0);
01030 #endif
01031 }
01032 
01033 
01034 /** \brief  Set FPSCR
01035 
01036     This function assigns the given value to the Floating Point Status/Control register.
01037 
01038     \param [in]    fpscr  Floating Point Status/Control value to set
01039  */
01040 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
01041 {
01042 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
01043 #if 1
01044     __ASM volatile ("vmsr fpscr, %0" : : "r" (fpscr) );
01045 #else
01046   register uint32_t __regfpscr         __ASM("fpscr");
01047   __regfpscr = (fpscr);
01048 #endif
01049 #endif
01050 }
01051 
01052 /** \brief  Get FPEXC
01053 
01054     This function returns the current value of the Floating Point Exception Control register.
01055 
01056     \return               Floating Point Exception Control register value
01057  */
01058 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPEXC(void)
01059 {
01060 #if (__FPU_PRESENT == 1)
01061 #if 1
01062     uint32_t result;
01063 
01064     __ASM volatile ("vmrs %0, fpexc" : "=r" (result));
01065     return (result);
01066 #else
01067   register uint32_t __regfpexc         __ASM("fpexc");
01068   return(__regfpexc);
01069 #endif
01070 #else
01071    return(0);
01072 #endif
01073 }
01074 
01075 
/** \brief  Set FPEXC

    This function assigns the given value to the Floating Point Exception Control register.
    A no-op when no FPU is present in this build.

    \param [in]    fpexc  Floating Point Exception Control value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
#if 1
    __ASM volatile ("vmsr fpexc, %0" : : "r" (fpexc));
#else
  register uint32_t __regfpexc         __ASM("fpexc");
  __regfpexc = (fpexc);
#endif
#endif
}
01093 
01094 /** \brief  Get CPACR
01095 
01096     This function returns the current value of the Coprocessor Access Control register.
01097 
01098     \return               Coprocessor Access Control register value
01099  */
01100 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPACR(void)
01101 {
01102 #if 1
01103     register uint32_t __regCPACR;
01104     __ASM volatile ("mrc p15, 0, %0, c1, c0, 2" : "=r" (__regCPACR));
01105 #else
01106     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
01107 #endif
01108     return __regCPACR;
01109 }
01110 
01111 /** \brief  Set CPACR
01112 
01113     This function assigns the given value to the Coprocessor Access Control register.
01114 
01115     \param [in]    cpacr  Coprocessor Acccess Control value to set
01116  */
01117 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CPACR(uint32_t cpacr)
01118 {
01119 #if 1
01120     __ASM volatile ("mcr p15, 0, %0, c1, c0, 2" : : "r" (cpacr));
01121 #else
01122     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
01123     __regCPACR = cpacr;
01124 #endif
01125     __ISB();
01126 }
01127 
01128 /** \brief  Get CBAR
01129 
01130     This function returns the value of the Configuration Base Address register.
01131 
01132     \return               Configuration Base Address register value
01133  */
01134 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CBAR() {
01135 #if 1
01136     register uint32_t __regCBAR;
01137     __ASM volatile ("mrc p15, 4, %0, c15, c0, 0" : "=r" (__regCBAR));
01138 #else
01139     register uint32_t __regCBAR         __ASM("cp15:4:c15:c0:0");
01140 #endif
01141     return(__regCBAR);
01142 }
01143 
01144 /** \brief  Get TTBR0
01145 
01146     This function returns the value of the Translation Table Base Register 0.
01147 
01148     \return               Translation Table Base Register 0 value
01149  */
01150 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_TTBR0() {
01151 #if 1
01152     register uint32_t __regTTBR0;
01153     __ASM volatile ("mrc p15, 0, %0, c2, c0, 0" : "=r" (__regTTBR0));
01154 #else
01155     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
01156 #endif
01157     return(__regTTBR0);
01158 }
01159 
01160 /** \brief  Set TTBR0
01161 
01162     This function assigns the given value to the Translation Table Base Register 0.
01163 
01164     \param [in]    ttbr0  Translation Table Base Register 0 value to set
01165  */
01166 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
01167 #if 1
01168     __ASM volatile ("mcr p15, 0, %0, c2, c0, 0" : : "r" (ttbr0));
01169 #else
01170     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
01171     __regTTBR0 = ttbr0;
01172 #endif
01173     __ISB();
01174 }
01175 
01176 /** \brief  Get DACR
01177 
01178     This function returns the value of the Domain Access Control Register.
01179 
01180     \return               Domain Access Control Register value
01181  */
01182 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_DACR() {
01183 #if 1
01184     register uint32_t __regDACR;
01185     __ASM volatile ("mrc p15, 0, %0, c3, c0, 0" : "=r" (__regDACR));
01186 #else
01187     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
01188 #endif
01189     return(__regDACR);
01190 }
01191 
01192 /** \brief  Set DACR
01193 
01194     This function assigns the given value to the Domain Access Control Register.
01195 
01196     \param [in]    dacr   Domain Access Control Register value to set
01197  */
01198 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_DACR(uint32_t dacr) {
01199 #if 1
01200     __ASM volatile ("mcr p15, 0, %0, c3, c0, 0" : : "r" (dacr));
01201 #else
01202     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
01203     __regDACR = dacr;
01204 #endif
01205     __ISB();
01206 }
01207 
01208 /******************************** Cache and BTAC enable  ****************************************************/
01209 
01210 /** \brief  Set SCTLR
01211 
01212     This function assigns the given value to the System Control Register.
01213 
01214     \param [in]    sctlr  System Control Register value to set
01215  */
01216 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
01217 {
01218 #if 1
01219     __ASM volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r" (sctlr));
01220 #else
01221     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
01222     __regSCTLR = sctlr;
01223 #endif
01224 }
01225 
01226 /** \brief  Get SCTLR
01227 
01228     This function returns the value of the System Control Register.
01229 
01230     \return               System Control Register value
01231  */
01232 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_SCTLR() {
01233 #if 1
01234     register uint32_t __regSCTLR;
01235     __ASM volatile ("mrc p15, 0, %0, c1, c0, 0" : "=r" (__regSCTLR));
01236 #else
01237     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
01238 #endif
01239     return(__regSCTLR);
01240 }
01241 
01242 /** \brief  Enable Caches
01243 
01244     Enable Caches
01245  */
01246 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_caches(void) {
01247     // Set I bit 12 to enable I Cache
01248     // Set C bit  2 to enable D Cache
01249     __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
01250 }
01251 
01252 /** \brief  Disable Caches
01253 
01254     Disable Caches
01255  */
01256 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_caches(void) {
01257     // Clear I bit 12 to disable I Cache
01258     // Clear C bit  2 to disable D Cache
01259     __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
01260     __ISB();
01261 }
01262 
01263 /** \brief  Enable BTAC
01264 
01265     Enable BTAC
01266  */
01267 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_btac(void) {
01268     // Set Z bit 11 to enable branch prediction
01269     __set_SCTLR( __get_SCTLR() | (1 << 11));
01270     __ISB();
01271 }
01272 
01273 /** \brief  Disable BTAC
01274 
01275     Disable BTAC
01276  */
01277 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_btac(void) {
01278     // Clear Z bit 11 to disable branch prediction
01279     __set_SCTLR( __get_SCTLR() & ~(1 << 11));
01280 }
01281 
01282 
/** \brief  Enable MMU

    Enable MMU
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_mmu(void) {
    // Set M bit 0 to enable the MMU
    // Set AFE bit to enable simplified access permissions model
    // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
    /* SCTLR bits touched here: M = bit 0, A = bit 1, TRE = bit 28, AFE = bit 29 */
    __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
    __ISB();
}
01294 
01295 /** \brief  Disable MMU
01296 
01297     Disable MMU
01298  */
01299 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_mmu(void) {
01300     // Clear M bit 0 to disable the MMU
01301     __set_SCTLR( __get_SCTLR() & ~1);
01302     __ISB();
01303 }
01304 
01305 /******************************** TLB maintenance operations ************************************************/
01306 /** \brief  Invalidate the whole tlb
01307 
01308     TLBIALL. Invalidate the whole tlb
01309  */
01310 
01311 __attribute__( ( always_inline ) ) __STATIC_INLINE void __ca9u_inv_tlb_all(void) {
01312 #if 1
01313     __ASM volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
01314 #else
01315     register uint32_t __TLBIALL         __ASM("cp15:0:c8:c7:0");
01316     __TLBIALL = 0;
01317 #endif
01318     __DSB();
01319     __ISB();
01320 }
01321 
01322 /******************************** BTB maintenance operations ************************************************/
01323 /** \brief  Invalidate entire branch predictor array
01324 
01325     BPIALL. Branch Predictor Invalidate All.
01326  */
01327 
01328 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_btac(void) {
01329 #if 1
01330     __ASM volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
01331 #else
01332     register uint32_t __BPIALL          __ASM("cp15:0:c7:c5:6");
01333     __BPIALL  = 0;
01334 #endif
01335     __DSB();     //ensure completion of the invalidation
01336     __ISB();     //ensure instruction fetch path sees new state
01337 }
01338 
01339 
01340 /******************************** L1 cache operations ******************************************************/
01341 
01342 /** \brief  Invalidate the whole I$
01343 
01344     ICIALLU. Instruction Cache Invalidate All to PoU
01345  */
01346 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_icache_all(void) {
01347 #if 1
01348     __ASM volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));
01349 #else
01350     register uint32_t __ICIALLU         __ASM("cp15:0:c7:c5:0");
01351     __ICIALLU = 0;
01352 #endif
01353     __DSB();     //ensure completion of the invalidation
01354     __ISB();     //ensure instruction fetch path sees new I cache state
01355 }
01356 
01357 /** \brief  Clean D$ by MVA
01358 
01359     DCCMVAC. Data cache clean by MVA to PoC
01360  */
01361 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
01362 #if 1
01363     __ASM volatile ("mcr p15, 0, %0, c7, c10, 1" : : "r" ((uint32_t)va));
01364 #else
01365     register uint32_t __DCCMVAC         __ASM("cp15:0:c7:c10:1");
01366     __DCCMVAC = (uint32_t)va;
01367 #endif
01368     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
01369 }
01370 
01371 /** \brief  Invalidate D$ by MVA
01372 
01373     DCIMVAC. Data cache invalidate by MVA to PoC
01374  */
01375 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
01376 #if 1
01377     __ASM volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" ((uint32_t)va));
01378 #else
01379     register uint32_t __DCIMVAC         __ASM("cp15:0:c7:c6:1");
01380     __DCIMVAC = (uint32_t)va;
01381 #endif
01382     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
01383 }
01384 
01385 /** \brief  Clean and Invalidate D$ by MVA
01386 
01387     DCCIMVAC. Data cache clean and invalidate by MVA to PoC
01388  */
01389 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
01390 #if 1
01391     __ASM volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" ((uint32_t)va));
01392 #else
01393     register uint32_t __DCCIMVAC        __ASM("cp15:0:c7:c14:1");
01394     __DCCIMVAC = (uint32_t)va;
01395 #endif
01396     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
01397 }
01398 
/** \brief  Clean and Invalidate the entire data or unified cache

    Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.
    Implemented elsewhere; \a op selects the operation:
    0 = invalidate (DCISW), 1 = clean (DCCSW), 2 = clean+invalidate (DCCISW),
    as used by the __v7_*_dcache_all() wrappers below.
 */
extern void __v7_all_cache(uint32_t op);
01404 
01405 
/** \brief  Invalidate the whole D$

    DCISW. Invalidate by Set/Way
 */

__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_all(void) {
    /* op 0 = invalidate entire D-cache by set/way. */
    __v7_all_cache(0);
}
01414 
/** \brief  Clean the whole D$

    DCCSW. Clean by Set/Way
 */

__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_all(void) {
    /* op 1 = clean entire D-cache by set/way. */
    __v7_all_cache(1);
}
01423 
/** \brief  Clean and invalidate the whole D$

    DCCISW. Clean and Invalidate by Set/Way
 */

__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
    /* op 2 = clean and invalidate entire D-cache by set/way. */
    __v7_all_cache(2);
}
01432 
01433 #include "core_ca_mmu.h"
01434 
01435 #elif (defined (__TASKING__)) /*--------------- TASKING Compiler -----------------*/
01436 
01437 #error TASKING Compiler support not implemented for Cortex-A
01438 
01439 #endif
01440 
01441 /*@} end of CMSIS_Core_RegAccFunctions */
01442 
01443 
01444 #endif /* __CORE_CAFUNC_H__ */