This fork captures the mbed lib v125 for ease of integration into older projects.

Fork of mbed-dev by mbed official

Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers core_caFunc.h Source File

core_caFunc.h

Go to the documentation of this file.
00001 /**************************************************************************//**
00002  * @file     core_caFunc.h
00003  * @brief    CMSIS Cortex-A Core Function Access Header File
00004  * @version  V3.10
00005  * @date     30 Oct 2013
00006  *
00007  * @note
00008  *
00009  ******************************************************************************/
00010 /* Copyright (c) 2009 - 2013 ARM LIMITED
00011 
00012    All rights reserved.
00013    Redistribution and use in source and binary forms, with or without
00014    modification, are permitted provided that the following conditions are met:
00015    - Redistributions of source code must retain the above copyright
00016      notice, this list of conditions and the following disclaimer.
00017    - Redistributions in binary form must reproduce the above copyright
00018      notice, this list of conditions and the following disclaimer in the
00019      documentation and/or other materials provided with the distribution.
00020    - Neither the name of ARM nor the names of its contributors may be used
00021      to endorse or promote products derived from this software without
00022      specific prior written permission.
00023    *
00024    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
00025    AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
00026    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
00027    ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
00028    LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
00029    CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
00030    SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
00031    INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
00032    CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
00033    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
00034    POSSIBILITY OF SUCH DAMAGE.
00035    ---------------------------------------------------------------------------*/
00036 
00037 
00038 #ifndef __CORE_CAFUNC_H__
00039 #define __CORE_CAFUNC_H__
00040 
00041 
00042 /* ###########################  Core Function Access  ########################### */
00043 /** \ingroup  CMSIS_Core_FunctionInterface
00044     \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
00045   @{
00046  */
00047 
00048 #if   defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
00049 /* ARM armcc specific functions */
00050 
00051 #if (__ARMCC_VERSION < 400677)
00052   #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
00053 #endif
00054 
00055 #define MODE_USR 0x10
00056 #define MODE_FIQ 0x11
00057 #define MODE_IRQ 0x12
00058 #define MODE_SVC 0x13
00059 #define MODE_MON 0x16
00060 #define MODE_ABT 0x17
00061 #define MODE_HYP 0x1A
00062 #define MODE_UND 0x1B
00063 #define MODE_SYS 0x1F
00064 
00065 /** \brief  Get APSR Register
00066 
00067     This function returns the content of the APSR Register.
00068 
00069     \return               APSR Register value
00070  */
00071 __STATIC_INLINE uint32_t __get_APSR(void)
00072 {
00073   register uint32_t __regAPSR          __ASM("apsr");
00074   return(__regAPSR);
00075 }
00076 
00077 
00078 /** \brief  Get CPSR Register
00079 
00080     This function returns the content of the CPSR Register.
00081 
00082     \return               CPSR Register value
00083  */
00084 __STATIC_INLINE uint32_t __get_CPSR(void)
00085 {
00086   register uint32_t __regCPSR          __ASM("cpsr");
00087   return(__regCPSR);
00088 }
00089 
00090 /** \brief  Set Stack Pointer
00091 
00092     This function assigns the given value to the current stack pointer.
00093 
00094     \param [in]    topOfStack  Stack Pointer value to set
00095  */
00096 register uint32_t __regSP              __ASM("sp");  // armcc named-register variable: file-scope alias bound to the SP register
00097 __STATIC_INLINE void __set_SP(uint32_t topOfStack)
00098 {
00099     __regSP = topOfStack;
00100 }
00101 
00102 
00103 /** \brief  Get link register
00104 
00105     This function returns the value of the link register
00106 
00107     \return    Value of link register
00108  */
00109 register uint32_t __reglr         __ASM("lr");  // armcc named-register variable: alias bound to LR, shared by __get_LR/__set_LR
00110 __STATIC_INLINE uint32_t __get_LR(void)
00111 {
00112   return(__reglr);
00113 }
00114 
00115 /** \brief  Set link register
00116 
00117     This function sets the value of the link register
00118 
00119     \param [in]    lr  LR value to set
00120  */
00121 __STATIC_INLINE void __set_LR(uint32_t lr)
00122 {
00123   __reglr = lr;
00124 }
00125 
00126 /** \brief  Set Process Stack Pointer
00127 
00128     This function assigns the given value to the USR/SYS Stack Pointer (PSP).
00129 
00130     \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
00131  */
00132 __STATIC_ASM void __set_PSP(uint32_t topOfProcStack)
00133 {
00134     ARM
00135     PRESERVE8
00136 
00137     BIC     R0, R0, #7  ;ensure stack is 8-byte aligned
00138     MRS     R1, CPSR
00139     CPS     #MODE_SYS   ;no effect in USR mode
00140     MOV     SP, R0
00141     MSR     CPSR_c, R1  ;no effect in USR mode
00142     ISB
00143     BX      LR
00144 
00145 }
00146 
00147 /** \brief  Set User Mode
00148 
00149     This function changes the processor state to User Mode
00150  */
00151 __STATIC_ASM void __set_CPS_USR(void)
00152 {
00153     ARM 
00154 
00155     CPS  #MODE_USR  
00156     BX   LR
00157 }
00158 
00159 
00160 /** \brief  Enable FIQ
00161 
00162     This function enables FIQ interrupts by clearing the F-bit in the CPSR.
00163     Can only be executed in Privileged modes.
00164  */
00165 #define __enable_fault_irq                __enable_fiq
00166 
00167 
00168 /** \brief  Disable FIQ
00169 
00170     This function disables FIQ interrupts by setting the F-bit in the CPSR.
00171     Can only be executed in Privileged modes.
00172  */
00173 #define __disable_fault_irq               __disable_fiq
00174 
00175 
00176 /** \brief  Get FPSCR
00177 
00178     This function returns the current value of the Floating Point Status/Control register.
00179 
00180     \return               Floating Point Status/Control register value
00181  */
00182 __STATIC_INLINE uint32_t __get_FPSCR(void)
00183 {
00184 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
00185   register uint32_t __regfpscr         __ASM("fpscr");
00186   return(__regfpscr);
00187 #else
00188    return(0);
00189 #endif
00190 }
00191 
00192 
00193 /** \brief  Set FPSCR
00194 
00195     This function assigns the given value to the Floating Point Status/Control register.
00196 
00197     \param [in]    fpscr  Floating Point Status/Control value to set
00198  */
00199 __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
00200 {
00201 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
00202   register uint32_t __regfpscr         __ASM("fpscr");
00203   __regfpscr = (fpscr);
00204 #endif
00205 }
00206 
00207 /** \brief  Get FPEXC
00208 
00209     This function returns the current value of the Floating Point Exception Control register.
00210 
00211     \return               Floating Point Exception Control register value (0 when __FPU_PRESENT is not 1)
00212  */
00213 __STATIC_INLINE uint32_t __get_FPEXC(void)
00214 {
00215 #if (__FPU_PRESENT == 1)
00216   register uint32_t __regfpexc         __ASM("fpexc");
00217   return(__regfpexc);
00218 #else
00219    return(0);
00220 #endif
00221 }
00222 
00223 
00224 /** \brief  Set FPEXC
00225 
00226     This function assigns the given value to the Floating Point Exception Control register.
00227 
00228     \param [in]    fpexc  Floating Point Exception Control value to set
00229  */
00230 __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
00231 {
00232 #if (__FPU_PRESENT == 1)
00233   register uint32_t __regfpexc         __ASM("fpexc");
00234   __regfpexc = (fpexc);
00235 #endif
00236 }
00237 
00238 /** \brief  Get CPACR
00239 
00240     This function returns the current value of the Coprocessor Access Control register.
00241 
00242     \return               Coprocessor Access Control register value
00243  */
00244 __STATIC_INLINE uint32_t __get_CPACR(void)
00245 {
00246     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");  // armcc named-register access to CP15 c1,c0,2 (CPACR)
00247     return __regCPACR;
00248 }
00249 
00250 /** \brief  Set CPACR
00251 
00252     This function assigns the given value to the Coprocessor Access Control register.
00253 
00254     \param [in]    cpacr  Coprocessor Access Control value to set
00255  */
00256 __STATIC_INLINE void __set_CPACR(uint32_t cpacr)
00257 {
00258     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
00259     __regCPACR = cpacr;
00260     __ISB();  // ensure the CPACR write takes effect before subsequent instructions
00261 }
00262 
00263 /** \brief  Get CBAR
00264 
00265     This function returns the value of the Configuration Base Address register.
00266 
00267     \return               Configuration Base Address register value
00268  */
00269 __STATIC_INLINE uint32_t __get_CBAR() {
00270     register uint32_t __regCBAR         __ASM("cp15:4:c15:c0:0");
00271     return(__regCBAR);
00272 }
00273 
00274 /** \brief  Get TTBR0
00275 
00276     This function returns the value of the Translation Table Base Register 0.
00277 
00278     \return               Translation Table Base Register 0 value
00279  */
00280 __STATIC_INLINE uint32_t __get_TTBR0() {
00281     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
00282     return(__regTTBR0);
00283 }
00284 
00285 /** \brief  Set TTBR0
00286 
00287     This function assigns the given value to the Translation Table Base Register 0.
00288 
00289     \param [in]    ttbr0  Translation Table Base Register 0 value to set
00290  */
00291 __STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
00292     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
00293     __regTTBR0 = ttbr0;
00294     __ISB();
00295 }
00296 
00297 /** \brief  Get DACR
00298 
00299     This function returns the value of the Domain Access Control Register.
00300 
00301     \return               Domain Access Control Register value
00302  */
00303 __STATIC_INLINE uint32_t __get_DACR() {
00304     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
00305     return(__regDACR);
00306 }
00307 
00308 /** \brief  Set DACR
00309 
00310     This function assigns the given value to the Domain Access Control Register.
00311 
00312     \param [in]    dacr   Domain Access Control Register value to set
00313  */
00314 __STATIC_INLINE void __set_DACR(uint32_t dacr) {
00315     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
00316     __regDACR = dacr;
00317     __ISB();
00318 }
00319 
00320 /******************************** Cache and BTAC enable  ****************************************************/
00321 
00322 /** \brief  Set SCTLR
00323 
00324     This function assigns the given value to the System Control Register.
00325 
00326     \param [in]    sctlr  System Control Register value to set
00327  */
00328 __STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
00329 {
00330     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
00331     __regSCTLR = sctlr;
00332 }
00333 
00334 /** \brief  Get SCTLR
00335 
00336     This function returns the value of the System Control Register.
00337 
00338     \return               System Control Register value
00339  */
00340 __STATIC_INLINE uint32_t __get_SCTLR() {
00341     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
00342     return(__regSCTLR);
00343 }
00344 
00345 /** \brief  Enable Caches
00346 
00347     Enable Caches
00348  */
00349 __STATIC_INLINE void __enable_caches(void) {
00350     // Set I bit 12 to enable I Cache
00351     // Set C bit  2 to enable D Cache
    // NOTE(review): no __ISB() after the SCTLR write here, unlike __disable_caches/__enable_btac —
    // confirm callers synchronize before relying on the new cache state
00352     __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
00353 }
00354 
00355 /** \brief  Disable Caches
00356 
00357     Disable Caches
00358  */
00359 __STATIC_INLINE void __disable_caches(void) {
00360     // Clear I bit 12 to disable I Cache
00361     // Clear C bit  2 to disable D Cache
00362     __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
00363     __ISB();
00364 }
00365 
00366 /** \brief  Enable BTAC
00367 
00368     Enable BTAC
00369  */
00370 __STATIC_INLINE void __enable_btac(void) {
00371     // Set Z bit 11 to enable branch prediction
00372     __set_SCTLR( __get_SCTLR() | (1 << 11));
00373     __ISB();
00374 }
00375 
00376 /** \brief  Disable BTAC
00377 
00378     Disable BTAC
00379  */
00380 __STATIC_INLINE void __disable_btac(void) {
00381     // Clear Z bit 11 to disable branch prediction
    // NOTE(review): no __ISB() here, unlike __enable_btac — confirm whether that asymmetry is intentional
00382     __set_SCTLR( __get_SCTLR() & ~(1 << 11));
00383 }
00385 
00386 /** \brief  Enable MMU
00387 
00388     Enable MMU
00389  */
00390 __STATIC_INLINE void __enable_mmu(void) {
00391     // Set M bit 0 to enable the MMU
00392     // Set AFE bit to enable simplified access permissions model
00393     // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
00394     __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));  // SCTLR: TRE=bit28, A=bit1, M=bit0, AFE=bit29
00395     __ISB();
00396 }
00397 
00398 /** \brief  Disable MMU
00399 
00400     Disable MMU
00401  */
00402 __STATIC_INLINE void __disable_mmu(void) {
00403     // Clear M bit 0 to disable the MMU
00404     __set_SCTLR( __get_SCTLR() & ~1);
00405     __ISB();
00406 }
00407 
00408 /******************************** TLB maintenance operations ************************************************/
00409 /** \brief  Invalidate the whole tlb
00410 
00411     TLBIALL. Invalidate the whole tlb
00412  */
00413 
00414 __STATIC_INLINE void __ca9u_inv_tlb_all(void) {
00415     register uint32_t __TLBIALL         __ASM("cp15:0:c8:c7:0");
00416     __TLBIALL = 0;
00417     __DSB();
00418     __ISB();
00419 }
00420 
00421 /******************************** BTB maintenance operations ************************************************/
00422 /** \brief  Invalidate entire branch predictor array
00423 
00424     BPIALL. Branch Predictor Invalidate All.
00425  */
00426 
00427 __STATIC_INLINE void __v7_inv_btac(void) {
00428     register uint32_t __BPIALL          __ASM("cp15:0:c7:c5:6");
00429     __BPIALL  = 0;
00430     __DSB();     //ensure completion of the invalidation
00431     __ISB();     //ensure instruction fetch path sees new state
00432 }
00433 
00434 
00435 /******************************** L1 cache operations ******************************************************/
00436 
00437 /** \brief  Invalidate the whole I$
00438 
00439     ICIALLU. Instruction Cache Invalidate All to PoU
00440  */
00441 __STATIC_INLINE void __v7_inv_icache_all(void) {
00442     register uint32_t __ICIALLU         __ASM("cp15:0:c7:c5:0");
00443     __ICIALLU = 0;
00444     __DSB();     //ensure completion of the invalidation
00445     __ISB();     //ensure instruction fetch path sees new I cache state
00446 }
00447 
00448 /** \brief  Clean D$ by MVA
00449 
00450     DCCMVAC. Data cache clean by MVA to PoC
00451  */
00452 __STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
00453     register uint32_t __DCCMVAC         __ASM("cp15:0:c7:c10:1");
00454     __DCCMVAC = (uint32_t)va;
00455     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
00456 }
00457 
00458 /** \brief  Invalidate D$ by MVA
00459 
00460     DCIMVAC. Data cache invalidate by MVA to PoC
00461  */
00462 __STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
00463     register uint32_t __DCIMVAC         __ASM("cp15:0:c7:c6:1");
00464     __DCIMVAC = (uint32_t)va;
00465     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
00466 }
00467 
00468 /** \brief  Clean and Invalidate D$ by MVA
00469 
00470     DCCIMVAC. Data cache clean and invalidate by MVA to PoC
00471  */
00472 __STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
00473     register uint32_t __DCCIMVAC        __ASM("cp15:0:c7:c14:1");
00474     __DCCIMVAC = (uint32_t)va;
00475     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
00476 }
00477 
00478 /** \brief  Clean and Invalidate the entire data or unified cache

00479 
00480     Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.

    \param [in]    op  0 = invalidate only (DCISW), 1 = clean only (DCCSW), 2 = clean and invalidate (DCCISW)
00481  */
00482 #pragma push
00483 #pragma arm
00484 __STATIC_ASM void __v7_all_cache(uint32_t op) {
00485         ARM 
00486 
00487         PUSH    {R4-R11}
00488 
00489         MRC     p15, 1, R6, c0, c0, 1      // Read CLIDR
00490         ANDS    R3, R6, #0x07000000        // Extract coherency level
00491         MOV     R3, R3, LSR #23            // Total cache levels << 1
00492         BEQ     Finished                   // If 0, no need to clean
00493 
00494         MOV     R10, #0                    // R10 holds current cache level << 1
00495 Loop1   ADD     R2, R10, R10, LSR #1       // R2 holds cache "Set" position
00496         MOV     R1, R6, LSR R2             // Bottom 3 bits are the Cache-type for this level
00497         AND     R1, R1, #7                 // Isolate those lower 3 bits
00498         CMP     R1, #2
00499         BLT     Skip                       // No cache or only instruction cache at this level
00500 
00501         MCR     p15, 2, R10, c0, c0, 0     // Write the Cache Size selection register
00502         ISB                                // ISB to sync the change to the CacheSizeID reg
00503         MRC     p15, 1, R1, c0, c0, 0      // Reads current Cache Size ID register
00504         AND     R2, R1, #7                 // Extract the line length field
00505         ADD     R2, R2, #4                 // Add 4 for the line length offset (log2 16 bytes)
00506         LDR     R4, =0x3FF
00507         ANDS    R4, R4, R1, LSR #3         // R4 is the max number on the way size (right aligned)
00508         CLZ     R5, R4                     // R5 is the bit position of the way size increment
00509         LDR     R7, =0x7FFF
00510         ANDS    R7, R7, R1, LSR #13        // R7 is the max number of the index size (right aligned)
00511 
00512 Loop2   MOV     R9, R4                     // R9 working copy of the max way size (right aligned)
00513 
00514 Loop3   ORR     R11, R10, R9, LSL R5       // Factor in the Way number and cache number into R11
00515         ORR     R11, R11, R7, LSL R2       // Factor in the Set number
00516         CMP     R0, #0
00517         BNE     Dccsw
00518         MCR     p15, 0, R11, c7, c6, 2     // DCISW. Invalidate by Set/Way
00519         B       cont
00520 Dccsw   CMP     R0, #1
00521         BNE     Dccisw
00522         MCR     p15, 0, R11, c7, c10, 2    // DCCSW. Clean by Set/Way
00523         B       cont
00524 Dccisw  MCR     p15, 0, R11, c7, c14, 2    // DCCISW. Clean and Invalidate by Set/Way
00525 cont    SUBS    R9, R9, #1                 // Decrement the Way number
00526         BGE     Loop3
00527         SUBS    R7, R7, #1                 // Decrement the Set number
00528         BGE     Loop2
00529 Skip    ADD     R10, R10, #2               // Increment the cache number
00530         CMP     R3, R10
00531         BGT     Loop1
00532 
00533 Finished
00534         DSB
00535         POP    {R4-R11}
00536         BX     lr
00537 
00538 }
00539 #pragma pop
00540 
00541 
00542 /** \brief  Invalidate the whole D$
00543 
00544     DCISW. Invalidate by Set/Way
00545  */
00546 
00547 __STATIC_INLINE void __v7_inv_dcache_all(void) {
00548     __v7_all_cache(0);
00549 }
00550 
00551 /** \brief  Clean the whole D$
00552 
00553     DCCSW. Clean by Set/Way
00554  */
00555 
00556 __STATIC_INLINE void __v7_clean_dcache_all(void) {
00557     __v7_all_cache(1);
00558 }
00559 
00560 /** \brief  Clean and invalidate the whole D$
00561 
00562     DCCISW. Clean and Invalidate by Set/Way
00563  */
00564 
00565 __STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
00566     __v7_all_cache(2);
00567 }
00568 
00569 #include "core_ca_mmu.h"
00570 
00571 #elif (defined (__ICCARM__)) /*---------------- ICC Compiler ---------------------*/
00572 
00573 #define __inline inline
00574 
/** \brief  Disable IRQ interrupts and return the previous mask state

    \return    0x80 if IRQs were already disabled before this call, 0 otherwise
 */
00575 inline static uint32_t __disable_irq_iar() {
00576   int irq_dis = __get_CPSR() & 0x80;      // CPSR.I is bit 7: 0x80 => IRQs were already masked
00577   __disable_irq();
00578   return irq_dis;
00579 }
00580 
00581 #define MODE_USR 0x10
00582 #define MODE_FIQ 0x11
00583 #define MODE_IRQ 0x12
00584 #define MODE_SVC 0x13
00585 #define MODE_MON 0x16
00586 #define MODE_ABT 0x17
00587 #define MODE_HYP 0x1A
00588 #define MODE_UND 0x1B
00589 #define MODE_SYS 0x1F
00590 
00591 /** \brief  Set Process Stack Pointer
00592 
00593     This function assigns the given value to the USR/SYS Stack Pointer (PSP).
00594 
00595     \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
00596  */
00597 // from rt_CMSIS.c
00598 __arm static inline void __set_PSP(uint32_t topOfProcStack) {
00599 __asm(
00600   "    ARM\n"
00601 //  "    PRESERVE8\n"
00602 
00603   "    BIC     R0, R0, #7  ;ensure stack is 8-byte aligned \n"
00604   "    MRS     R1, CPSR \n"
00605   "    CPS     #0x1F   ;no effect in USR mode \n"        // MODE_SYS
00606   "    MOV     SP, R0 \n"
00607   "    MSR     CPSR_c, R1  ;no effect in USR mode \n"
00608   "    ISB \n"
00609   "    BX      LR \n");
00610 }
00611 
00612 /** \brief  Set User Mode
00613 
00614     This function changes the processor state to User Mode
00615  */
00616 // from rt_CMSIS.c
00617 __arm static inline void __set_CPS_USR(void) {
00618 __asm(
00619   "    ARM \n"
00620 
00621   "    CPS  #0x10  \n"                  // MODE_USR
00622   "    BX   LR\n");
00623 }
00624 
00625 /** \brief  Set TTBR0
00626 
00627     This function assigns the given value to the Translation Table Base Register 0.
00628 
00629     \param [in]    ttbr0  Translation Table Base Register 0 value to set
00630  */
00631 // from mmu_Renesas_RZ_A1.c
00632 __STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
00633     __MCR(15, 0, ttbr0, 2, 0, 0);      // reg to cp15
00634     __ISB();
00635 }
00636 
00637 /** \brief  Set DACR
00638 
00639     This function assigns the given value to the Domain Access Control Register.
00640 
00641     \param [in]    dacr   Domain Access Control Register value to set
00642  */
00643 // from mmu_Renesas_RZ_A1.c
00644 __STATIC_INLINE void __set_DACR(uint32_t dacr) {
00645     __MCR(15, 0, dacr, 3, 0, 0);      // reg to cp15
00646     __ISB();
00647 }
00648 
00649 
00650 /******************************** Cache and BTAC enable  ****************************************************/
00651 /** \brief  Set SCTLR
00652 
00653     This function assigns the given value to the System Control Register.
00654 
00655     \param [in]    sctlr  System Control Register value to set
00656  */
00657 // from __enable_mmu()
00658 __STATIC_INLINE void __set_SCTLR(uint32_t sctlr) {
00659     __MCR(15, 0, sctlr, 1, 0, 0);      // reg to cp15
00660 }
00661 
00662 /** \brief  Get SCTLR
00663 
00664     This function returns the value of the System Control Register.
00665 
00666     \return               System Control Register value
00667  */
00668 // from __enable_mmu()
00669 __STATIC_INLINE uint32_t __get_SCTLR() {
00670     uint32_t __regSCTLR = __MRC(15, 0, 1, 0, 0);
00671     return __regSCTLR;
00672 }
00673 
00674 /** \brief  Enable Caches
00675 
00676     Enable Caches
00677  */
00678 // from system_Renesas_RZ_A1.c
00679 __STATIC_INLINE void __enable_caches(void) {
00680     __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
00681 }
00682 
00683 /** \brief  Enable BTAC
00684 
00685     Enable BTAC
00686  */
00687 // from system_Renesas_RZ_A1.c
00688 __STATIC_INLINE void __enable_btac(void) {
00689     __set_SCTLR( __get_SCTLR() | (1 << 11));
00690     __ISB();
00691 }
00692 
00693 /** \brief  Enable MMU
00694 
00695     Enable MMU
00696  */
00697 // from system_Renesas_RZ_A1.c
00698 __STATIC_INLINE void __enable_mmu(void) {
00699     // Set M bit 0 to enable the MMU
00700     // Set AFE bit to enable simplified access permissions model
00701     // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
00702     __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
00703     __ISB();
00704 }
00705 
00706 /******************************** TLB maintenance operations ************************************************/
00707 /** \brief  Invalidate the whole tlb
00708 
00709     TLBIALL. Invalidate the whole tlb
00710  */
00711 // from system_Renesas_RZ_A1.c
00712 __STATIC_INLINE void __ca9u_inv_tlb_all(void) {
00713     uint32_t val = 0;
00714     __MCR(15, 0, val, 8, 7, 0);      // reg to cp15
00715     __MCR(15, 0, val, 8, 6, 0);      // reg to cp15
00716     __MCR(15, 0, val, 8, 5, 0);      // reg to cp15
00717     __DSB();
00718     __ISB();
00719 }
00720 
00721 /******************************** BTB maintenance operations ************************************************/
00722 /** \brief  Invalidate entire branch predictor array
00723 
00724     BPIALL. Branch Predictor Invalidate All.
00725  */
00726 // from system_Renesas_RZ_A1.c
00727 __STATIC_INLINE void __v7_inv_btac(void) {
00728     uint32_t val = 0;
00729     __MCR(15, 0, val, 7, 5, 6);      // reg to cp15
00730     __DSB();     //ensure completion of the invalidation
00731     __ISB();     //ensure instruction fetch path sees new state
00732 }
00733 
00734 
00735 /******************************** L1 cache operations ******************************************************/
00736 
00737 /** \brief  Invalidate the whole I$
00738 
00739     ICIALLU. Instruction Cache Invalidate All to PoU
00740  */
00741 // from system_Renesas_RZ_A1.c
00742 __STATIC_INLINE void __v7_inv_icache_all(void) {
00743     uint32_t val = 0;
00744     __MCR(15, 0, val, 7, 5, 0);      // reg to cp15
00745     __DSB();     //ensure completion of the invalidation
00746     __ISB();     //ensure instruction fetch path sees new I cache state
00747 }
00748 
00749 // from __v7_inv_dcache_all()
// op: 0 = invalidate only (DCISW), 1 = clean only (DCCSW), 2 = clean and invalidate (DCCISW)
00750 __arm static inline void __v7_all_cache(uint32_t op) {
00751 __asm(
00752     "        ARM \n"
00753 
00754     "        PUSH    {R4-R11} \n"
00755 
00756     "        MRC     p15, 1, R6, c0, c0, 1\n"      // Read CLIDR
00757     "        ANDS    R3, R6, #0x07000000\n"        // Extract coherency level
00758     "        MOV     R3, R3, LSR #23\n"            // Total cache levels << 1
00759     "        BEQ     Finished\n"                   // If 0, no need to clean
00760 
00761     "        MOV     R10, #0\n"                    // R10 holds current cache level << 1
00762     "Loop1:   ADD     R2, R10, R10, LSR #1\n"       // R2 holds cache "Set" position
00763     "        MOV     R1, R6, LSR R2 \n"            // Bottom 3 bits are the Cache-type for this level
00764     "        AND     R1, R1, #7 \n"                // Isolate those lower 3 bits
00765     "        CMP     R1, #2 \n"
00766     "        BLT     Skip \n"                      // No cache or only instruction cache at this level
00767 
00768     "        MCR     p15, 2, R10, c0, c0, 0 \n"    // Write the Cache Size selection register
00769     "        ISB \n"                               // ISB to sync the change to the CacheSizeID reg
00770     "        MRC     p15, 1, R1, c0, c0, 0 \n"     // Reads current Cache Size ID register
00771     "        AND     R2, R1, #7 \n"                // Extract the line length field
00772     "        ADD     R2, R2, #4 \n"                // Add 4 for the line length offset (log2 16 bytes)
00773     "        movw    R4, #0x3FF \n"
00774     "        ANDS    R4, R4, R1, LSR #3 \n"        // R4 is the max number on the way size (right aligned)
00775     "        CLZ     R5, R4 \n"                    // R5 is the bit position of the way size increment
00776     "        movw    R7, #0x7FFF \n"
00777     "        ANDS    R7, R7, R1, LSR #13 \n"       // R7 is the max number of the index size (right aligned)
00778 
00779     "Loop2:   MOV     R9, R4 \n"                    // R9 working copy of the max way size (right aligned)
00780 
00781     "Loop3:   ORR     R11, R10, R9, LSL R5 \n"      // Factor in the Way number and cache number into R11
00782     "        ORR     R11, R11, R7, LSL R2 \n"      // Factor in the Set number
00783     "        CMP     R0, #0 \n"
00784     "        BNE     Dccsw \n"
00785     "        MCR     p15, 0, R11, c7, c6, 2 \n"    // DCISW. Invalidate by Set/Way
00786     "        B       cont \n"
00787     "Dccsw:   CMP     R0, #1 \n"
00788     "        BNE     Dccisw \n"
00789     "        MCR     p15, 0, R11, c7, c10, 2 \n"   // DCCSW. Clean by Set/Way
00790     "        B       cont \n"
00791     "Dccisw:  MCR     p15, 0, R11, c7, c14, 2 \n"   // DCCISW, Clean and Invalidate by Set/Way
00792     "cont:    SUBS    R9, R9, #1 \n"                // Decrement the Way number
00793     "        BGE     Loop3 \n"
00794     "        SUBS    R7, R7, #1 \n"                // Decrement the Set number
00795     "        BGE     Loop2 \n"
00796     "Skip:    ADD     R10, R10, #2 \n"              // increment the cache number
00797     "        CMP     R3, R10 \n"
00798     "        BGT     Loop1 \n"
00799 
00800     "Finished: \n"
00801     "        DSB \n"
00802     "        POP    {R4-R11} \n"
00803     "        BX     lr \n" );
00804 }
00805 
00806 /** \brief  Invalidate the whole D$
00807 
00808     DCISW. Invalidate by Set/Way
00809  */
00810 // from system_Renesas_RZ_A1.c
00811 __STATIC_INLINE void __v7_inv_dcache_all(void) {
00812     __v7_all_cache(0);
00813 }
00814 /** \brief  Clean and Invalidate D$ by MVA
00815 
00816     DCCIMVAC. Data cache clean and invalidate by MVA to PoC
00817  */
00818 __STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
00819     __MCR(15, 0, (uint32_t)va, 7, 14, 1);
00820     __DMB();
00821 }
00822 
00823 #include "core_ca_mmu.h"
00824 
00825 #elif (defined (__GNUC__)) /*------------------ GNU Compiler ---------------------*/
00826 /* GNU gcc specific functions */
00827 
00828 #define MODE_USR 0x10
00829 #define MODE_FIQ 0x11
00830 #define MODE_IRQ 0x12
00831 #define MODE_SVC 0x13
00832 #define MODE_MON 0x16
00833 #define MODE_ABT 0x17
00834 #define MODE_HYP 0x1A
00835 #define MODE_UND 0x1B
00836 #define MODE_SYS 0x1F
00837 
00838 
/** \brief  Enable IRQ Interrupts

  This function enables IRQ interrupts by clearing the I-bit in the CPSR.
  Can only be executed in Privileged modes.
 */
00839 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_irq(void)
00840 {
00841     __ASM volatile ("cpsie i");
00842 }
00843 
00844 /** \brief  Disable IRQ Interrupts
00845 
00846   This function disables IRQ interrupts by setting the I-bit in the CPSR.
00847   Can only be executed in Privileged modes.
00848  */
00849 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __disable_irq(void)
00850 {
00851     uint32_t result;
00852 
00853     __ASM volatile ("mrs %0, cpsr" : "=r" (result));
00854     __ASM volatile ("cpsid i");
00855     return(result & 0x80);  // previous CPSR.I state: 0x80 => IRQs were already masked
00856 }
00857 
00858 
00859 /** \brief  Get APSR Register
00860 
00861     This function returns the content of the APSR Register.
00862 
00863     \return               APSR Register value
00864  */
00865 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_APSR(void)
00866 {
00867 #if 1
00868   register uint32_t __regAPSR;
00869   __ASM volatile ("mrs %0, apsr" : "=r" (__regAPSR) );
00870 #else
00871   register uint32_t __regAPSR          __ASM("apsr");
00872 #endif
00873   return(__regAPSR);
00874 }
00875 
00876 
00877 /** \brief  Get CPSR Register
00878 
00879     This function returns the content of the CPSR Register.
00880 
00881     \return               CPSR Register value
00882  */
00883 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPSR(void)
00884 {
00885 #if 1
00886   register uint32_t __regCPSR;
00887   __ASM volatile ("mrs %0, cpsr" : "=r" (__regCPSR));
00888 #else
00889   register uint32_t __regCPSR          __ASM("cpsr");
00890 #endif
00891   return(__regCPSR);
00892 }
00893 
#if 0 /* Disabled: named-register write to SP; kept for reference only. */
/** \brief  Set Stack Pointer

    This function assigns the given value to the current stack pointer.

    \param [in]    topOfStack  Stack Pointer value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SP(uint32_t topOfStack)
{
    register uint32_t __regSP       __ASM("sp");
    __regSP = topOfStack;
}
#endif
00907 
/** \brief  Get link register

    This function returns the value of the link register

    \return    Value of link register
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_LR(void)
{
  /* GCC named-register variable pins __reglr to LR; combined with
     always_inline this reads the caller's link register. */
  register uint32_t __reglr         __ASM("lr");
  return(__reglr);
}
00919 
#if 0 /* Disabled: named-register write to LR; kept for reference only. */
/** \brief  Set link register

    This function sets the value of the link register

    \param [in]    lr  LR value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_LR(uint32_t lr)
{
  register uint32_t __reglr         __ASM("lr");
  __reglr = lr;
}
#endif
00933 
00934 /** \brief  Set Process Stack Pointer
00935 
00936     This function assigns the given value to the USR/SYS Stack Pointer (PSP).
00937 
00938     \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
00939  */
00940 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
00941 {
00942     __asm__ volatile (
00943     ".ARM;"
00944     ".eabi_attribute Tag_ABI_align8_preserved,1;"
00945 
00946     "BIC     R0, R0, #7;" /* ;ensure stack is 8-byte aligned */
00947     "MRS     R1, CPSR;"
00948     "CPS     %0;"         /* ;no effect in USR mode */
00949     "MOV     SP, R0;"
00950     "MSR     CPSR_c, R1;" /* ;no effect in USR mode */
00951     "ISB;"
00952     //"BX      LR;"
00953     :
00954     : "i"(MODE_SYS)
00955     : "r0", "r1");
00956     return;
00957 }
00958 
/** \brief  Set User Mode

    This function changes the processor state to User Mode

    NOTE(review): presumably the USR/SYS stack must already be set up (see
    __set_PSP) before dropping privilege — confirm against callers.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CPS_USR(void)
{
    __asm__ volatile (
    ".ARM;"

    "CPS  %0;"
    //"BX   LR;"
    :
    : "i"(MODE_USR)
    : );
    return;
}
00975 
00976 
/** \brief  Enable FIQ

    This function enables FIQ interrupts by clearing the F-bit in the CPSR.
    Can only be executed in Privileged modes.
 */
#define __enable_fault_irq()                __asm__ volatile ("cpsie f")


/** \brief  Disable FIQ

    This function disables FIQ interrupts by setting the F-bit in the CPSR.
    Can only be executed in Privileged modes.
    Unlike __disable_irq(), the previous F-bit state is not returned.
 */
#define __disable_fault_irq()               __asm__ volatile ("cpsid f")
00991 
00992 
00993 /** \brief  Get FPSCR
00994 
00995     This function returns the current value of the Floating Point Status/Control register.
00996 
00997     \return               Floating Point Status/Control register value
00998  */
00999 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPSCR(void)
01000 {
01001 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
01002 #if 1
01003     uint32_t result;
01004 
01005     __ASM volatile ("vmrs %0, fpscr" : "=r" (result) );
01006     return (result);
01007 #else
01008   register uint32_t __regfpscr         __ASM("fpscr");
01009   return(__regfpscr);
01010 #endif
01011 #else
01012    return(0);
01013 #endif
01014 }
01015 
01016 
01017 /** \brief  Set FPSCR
01018 
01019     This function assigns the given value to the Floating Point Status/Control register.
01020 
01021     \param [in]    fpscr  Floating Point Status/Control value to set
01022  */
01023 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
01024 {
01025 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
01026 #if 1
01027     __ASM volatile ("vmsr fpscr, %0" : : "r" (fpscr) );
01028 #else
01029   register uint32_t __regfpscr         __ASM("fpscr");
01030   __regfpscr = (fpscr);
01031 #endif
01032 #endif
01033 }
01034 
01035 /** \brief  Get FPEXC
01036 
01037     This function returns the current value of the Floating Point Exception Control register.
01038 
01039     \return               Floating Point Exception Control register value
01040  */
01041 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPEXC(void)
01042 {
01043 #if (__FPU_PRESENT == 1)
01044 #if 1
01045     uint32_t result;
01046 
01047     __ASM volatile ("vmrs %0, fpexc" : "=r" (result));
01048     return (result);
01049 #else
01050   register uint32_t __regfpexc         __ASM("fpexc");
01051   return(__regfpexc);
01052 #endif
01053 #else
01054    return(0);
01055 #endif
01056 }
01057 
01058 
/** \brief  Set FPEXC

    This function assigns the given value to the Floating Point Exception Control register.
    Compiles to nothing when no FPU is present.

    \param [in]    fpexc  Floating Point Exception Control value to set
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
#if 1
    __ASM volatile ("vmsr fpexc, %0" : : "r" (fpexc));
#else
  register uint32_t __regfpexc         __ASM("fpexc");
  __regfpexc = (fpexc);
#endif
#endif
}
01076 
01077 /** \brief  Get CPACR
01078 
01079     This function returns the current value of the Coprocessor Access Control register.
01080 
01081     \return               Coprocessor Access Control register value
01082  */
01083 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPACR(void)
01084 {
01085 #if 1
01086     register uint32_t __regCPACR;
01087     __ASM volatile ("mrc p15, 0, %0, c1, c0, 2" : "=r" (__regCPACR));
01088 #else
01089     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
01090 #endif
01091     return __regCPACR;
01092 }
01093 
01094 /** \brief  Set CPACR
01095 
01096     This function assigns the given value to the Coprocessor Access Control register.
01097 
01098     \param [in]    cpacr  Coprocessor Acccess Control value to set
01099  */
01100 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CPACR(uint32_t cpacr)
01101 {
01102 #if 1
01103     __ASM volatile ("mcr p15, 0, %0, c1, c0, 2" : : "r" (cpacr));
01104 #else
01105     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
01106     __regCPACR = cpacr;
01107 #endif
01108     __ISB();
01109 }
01110 
01111 /** \brief  Get CBAR
01112 
01113     This function returns the value of the Configuration Base Address register.
01114 
01115     \return               Configuration Base Address register value
01116  */
01117 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CBAR() {
01118 #if 1
01119     register uint32_t __regCBAR;
01120     __ASM volatile ("mrc p15, 4, %0, c15, c0, 0" : "=r" (__regCBAR));
01121 #else
01122     register uint32_t __regCBAR         __ASM("cp15:4:c15:c0:0");
01123 #endif
01124     return(__regCBAR);
01125 }
01126 
01127 /** \brief  Get TTBR0
01128 
01129     This function returns the value of the Translation Table Base Register 0.
01130 
01131     \return               Translation Table Base Register 0 value
01132  */
01133 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_TTBR0() {
01134 #if 1
01135     register uint32_t __regTTBR0;
01136     __ASM volatile ("mrc p15, 0, %0, c2, c0, 0" : "=r" (__regTTBR0));
01137 #else
01138     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
01139 #endif
01140     return(__regTTBR0);
01141 }
01142 
01143 /** \brief  Set TTBR0
01144 
01145     This function assigns the given value to the Translation Table Base Register 0.
01146 
01147     \param [in]    ttbr0  Translation Table Base Register 0 value to set
01148  */
01149 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
01150 #if 1
01151     __ASM volatile ("mcr p15, 0, %0, c2, c0, 0" : : "r" (ttbr0));
01152 #else
01153     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
01154     __regTTBR0 = ttbr0;
01155 #endif
01156     __ISB();
01157 }
01158 
01159 /** \brief  Get DACR
01160 
01161     This function returns the value of the Domain Access Control Register.
01162 
01163     \return               Domain Access Control Register value
01164  */
01165 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_DACR() {
01166 #if 1
01167     register uint32_t __regDACR;
01168     __ASM volatile ("mrc p15, 0, %0, c3, c0, 0" : "=r" (__regDACR));
01169 #else
01170     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
01171 #endif
01172     return(__regDACR);
01173 }
01174 
01175 /** \brief  Set DACR
01176 
01177     This function assigns the given value to the Domain Access Control Register.
01178 
01179     \param [in]    dacr   Domain Access Control Register value to set
01180  */
01181 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_DACR(uint32_t dacr) {
01182 #if 1
01183     __ASM volatile ("mcr p15, 0, %0, c3, c0, 0" : : "r" (dacr));
01184 #else
01185     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
01186     __regDACR = dacr;
01187 #endif
01188     __ISB();
01189 }
01190 
01191 /******************************** Cache and BTAC enable  ****************************************************/
01192 
01193 /** \brief  Set SCTLR
01194 
01195     This function assigns the given value to the System Control Register.
01196 
01197     \param [in]    sctlr  System Control Register value to set
01198  */
01199 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
01200 {
01201 #if 1
01202     __ASM volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r" (sctlr));
01203 #else
01204     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
01205     __regSCTLR = sctlr;
01206 #endif
01207 }
01208 
01209 /** \brief  Get SCTLR
01210 
01211     This function returns the value of the System Control Register.
01212 
01213     \return               System Control Register value
01214  */
01215 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_SCTLR() {
01216 #if 1
01217     register uint32_t __regSCTLR;
01218     __ASM volatile ("mrc p15, 0, %0, c1, c0, 0" : "=r" (__regSCTLR));
01219 #else
01220     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
01221 #endif
01222     return(__regSCTLR);
01223 }
01224 
01225 /** \brief  Enable Caches
01226 
01227     Enable Caches
01228  */
01229 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_caches(void) {
01230     // Set I bit 12 to enable I Cache
01231     // Set C bit  2 to enable D Cache
01232     __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
01233 }
01234 
01235 /** \brief  Disable Caches
01236 
01237     Disable Caches
01238  */
01239 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_caches(void) {
01240     // Clear I bit 12 to disable I Cache
01241     // Clear C bit  2 to disable D Cache
01242     __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
01243     __ISB();
01244 }
01245 
01246 /** \brief  Enable BTAC
01247 
01248     Enable BTAC
01249  */
01250 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_btac(void) {
01251     // Set Z bit 11 to enable branch prediction
01252     __set_SCTLR( __get_SCTLR() | (1 << 11));
01253     __ISB();
01254 }
01255 
01256 /** \brief  Disable BTAC
01257 
01258     Disable BTAC
01259  */
01260 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_btac(void) {
01261     // Clear Z bit 11 to disable branch prediction
01262     __set_SCTLR( __get_SCTLR() & ~(1 << 11));
01263 }
01264 
01265 
/** \brief  Enable MMU

    Enable MMU
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_mmu(void) {
    // Set M bit 0 to enable the MMU
    // Set AFE bit to enable simplified access permissions model
    // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
    // SCTLR bits touched below: M = bit 0, A = bit 1, TRE = bit 28, AFE = bit 29
    __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
    __ISB();
}
01277 
01278 /** \brief  Disable MMU
01279 
01280     Disable MMU
01281  */
01282 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_mmu(void) {
01283     // Clear M bit 0 to disable the MMU
01284     __set_SCTLR( __get_SCTLR() & ~1);
01285     __ISB();
01286 }
01287 
01288 /******************************** TLB maintenance operations ************************************************/
01289 /** \brief  Invalidate the whole tlb
01290 
01291     TLBIALL. Invalidate the whole tlb
01292  */
01293 
01294 __attribute__( ( always_inline ) ) __STATIC_INLINE void __ca9u_inv_tlb_all(void) {
01295 #if 1
01296     __ASM volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
01297 #else
01298     register uint32_t __TLBIALL         __ASM("cp15:0:c8:c7:0");
01299     __TLBIALL = 0;
01300 #endif
01301     __DSB();
01302     __ISB();
01303 }
01304 
01305 /******************************** BTB maintenance operations ************************************************/
01306 /** \brief  Invalidate entire branch predictor array
01307 
01308     BPIALL. Branch Predictor Invalidate All.
01309  */
01310 
01311 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_btac(void) {
01312 #if 1
01313     __ASM volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
01314 #else
01315     register uint32_t __BPIALL          __ASM("cp15:0:c7:c5:6");
01316     __BPIALL  = 0;
01317 #endif
01318     __DSB();     //ensure completion of the invalidation
01319     __ISB();     //ensure instruction fetch path sees new state
01320 }
01321 
01322 
01323 /******************************** L1 cache operations ******************************************************/
01324 
01325 /** \brief  Invalidate the whole I$
01326 
01327     ICIALLU. Instruction Cache Invalidate All to PoU
01328  */
01329 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_icache_all(void) {
01330 #if 1
01331     __ASM volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));
01332 #else
01333     register uint32_t __ICIALLU         __ASM("cp15:0:c7:c5:0");
01334     __ICIALLU = 0;
01335 #endif
01336     __DSB();     //ensure completion of the invalidation
01337     __ISB();     //ensure instruction fetch path sees new I cache state
01338 }
01339 
01340 /** \brief  Clean D$ by MVA
01341 
01342     DCCMVAC. Data cache clean by MVA to PoC
01343  */
01344 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
01345 #if 1
01346     __ASM volatile ("mcr p15, 0, %0, c7, c10, 1" : : "r" ((uint32_t)va));
01347 #else
01348     register uint32_t __DCCMVAC         __ASM("cp15:0:c7:c10:1");
01349     __DCCMVAC = (uint32_t)va;
01350 #endif
01351     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
01352 }
01353 
01354 /** \brief  Invalidate D$ by MVA
01355 
01356     DCIMVAC. Data cache invalidate by MVA to PoC
01357  */
01358 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
01359 #if 1
01360     __ASM volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" ((uint32_t)va));
01361 #else
01362     register uint32_t __DCIMVAC         __ASM("cp15:0:c7:c6:1");
01363     __DCIMVAC = (uint32_t)va;
01364 #endif
01365     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
01366 }
01367 
01368 /** \brief  Clean and Invalidate D$ by MVA
01369 
01370     DCCIMVAC. Data cache clean and invalidate by MVA to PoC
01371  */
01372 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
01373 #if 1
01374     __ASM volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" ((uint32_t)va));
01375 #else
01376     register uint32_t __DCCIMVAC        __ASM("cp15:0:c7:c14:1");
01377     __DCCIMVAC = (uint32_t)va;
01378 #endif
01379     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
01380 }
01381 
/** \brief  Clean and Invalidate the entire data or unified cache

    Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.

    \param [in] op  Operation selector: 0 = invalidate, 1 = clean,
                    2 = clean and invalidate (by set/way).
 */
extern void __v7_all_cache(uint32_t op);
01387 
01388 
/** \brief  Invalidate the whole D$

    DCISW. Invalidate by Set/Way
 */

__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_all(void) {
    __v7_all_cache(0);   /* op 0 = invalidate */
}
01397 
/** \brief  Clean the whole D$

    DCCSW. Clean by Set/Way
 */

__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_all(void) {
    __v7_all_cache(1);   /* op 1 = clean */
}
01406 
/** \brief  Clean and invalidate the whole D$

    DCCISW. Clean and Invalidate by Set/Way
 */

__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
    __v7_all_cache(2);   /* op 2 = clean and invalidate */
}
01415 
01416 #include "core_ca_mmu.h"
01417 
01418 #elif (defined (__TASKING__)) /*--------------- TASKING Compiler -----------------*/
01419 
01420 #error TASKING Compiler support not implemented for Cortex-A
01421 
01422 #endif
01423 
01424 /*@} end of CMSIS_Core_RegAccFunctions */
01425 
01426 
01427 #endif /* __CORE_CAFUNC_H__ */