mbed library sources

Dependents:   RPC_Serial_V_mac


core_cmInstr.h

00001 /**************************************************************************//**
00002  * @file     core_cmInstr.h
00003  * @brief    CMSIS Cortex-M Core Instruction Access Header File
00004  * @version  V3.20
00005  * @date     05. March 2013
00006  *
00007  * @note
00008  *
00009  ******************************************************************************/
00010 /* Copyright (c) 2009 - 2013 ARM LIMITED
00011 
00012    All rights reserved.
00013    Redistribution and use in source and binary forms, with or without
00014    modification, are permitted provided that the following conditions are met:
00015    - Redistributions of source code must retain the above copyright
00016      notice, this list of conditions and the following disclaimer.
00017    - Redistributions in binary form must reproduce the above copyright
00018      notice, this list of conditions and the following disclaimer in the
00019      documentation and/or other materials provided with the distribution.
00020    - Neither the name of ARM nor the names of its contributors may be used
00021      to endorse or promote products derived from this software without
00022      specific prior written permission.
00023    *
00024    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
00025    AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
00026    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
00027    ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
00028    LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
00029    CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
00030    SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
00031    INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
00032    CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
00033    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
00034    POSSIBILITY OF SUCH DAMAGE.
00035    ---------------------------------------------------------------------------*/
00036 
00037 
00038 #ifndef __CORE_CMINSTR_H
00039 #define __CORE_CMINSTR_H
00040 
00041 
00042 /* ##########################  Core Instruction Access  ######################### */
00043 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
00044   Access to dedicated instructions
00045   @{
00046 */
00047 
00048 #if   defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
00049 /* ARM armcc specific functions */
00050 
00051 #if (__ARMCC_VERSION < 400677)
00052   #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
00053 #endif
00054 
00055 
00056 /** \brief  No Operation
00057 
00058     No Operation does nothing. This instruction can be used for code alignment purposes.
00059  */
00060 #define __NOP                             __nop
00061 
00062 
00063 /** \brief  Wait For Interrupt
00064 
00065     Wait For Interrupt is a hint instruction that suspends execution
00066     until one of a number of events occurs.
00067  */
00068 #define __WFI                             __wfi
00069 
00070 
00071 /** \brief  Wait For Event
00072 
00073     Wait For Event is a hint instruction that permits the processor to enter
00074     a low-power state until one of a number of events occurs.
00075  */
00076 #define __WFE                             __wfe
00077 
00078 
00079 /** \brief  Send Event
00080 
00081     Send Event is a hint instruction. It causes an event to be signaled to the CPU.
00082  */
00083 #define __SEV                             __sev
00084 
00085 
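/* Usage sketch (illustrative only): __WFI is the usual "sleep until an
   interrupt" idle, while __WFE/__SEV give a light-weight wait/notify between
   an interrupt handler and thread code.  The flag and handler names below
   are assumptions, not part of CMSIS. */
static volatile uint32_t example_event_flag = 0;

static void example_wait_for_event(void)
{
  while (example_event_flag == 0) {
    __WFE();                          /* sleep until an event or interrupt arrives */
  }
  example_event_flag = 0;
}

void EXAMPLE_IRQHandler(void)         /* hypothetical interrupt handler */
{
  example_event_flag = 1;
  __SEV();                            /* wake anything waiting in __WFE() */
}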
00086 /** \brief  Instruction Synchronization Barrier
00087 
00088     Instruction Synchronization Barrier flushes the pipeline in the processor,
00089     so that all instructions following the ISB are fetched from cache or
00090     memory, after the instruction has been completed.
00091  */
00092 #define __ISB()                           __isb(0xF)
00093 
00094 
00095 /** \brief  Data Synchronization Barrier
00096 
00097     This function acts as a special kind of Data Memory Barrier.
00098     It completes when all explicit memory accesses before this instruction complete.
00099  */
00100 #define __DSB()                           __dsb(0xF)
00101 
00102 
00103 /** \brief  Data Memory Barrier
00104 
00105     This function ensures the apparent order of the explicit memory operations before
00106     and after the instruction, without ensuring their completion.
00107  */
00108 #define __DMB()                           __dmb(0xF)
00109 
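/* Usage sketch (illustrative only): __DMB orders the payload write before the
   flag write as seen by other observers (another master, DMA, or an interrupt
   handler), while __DSB/__ISB are typically placed after system control
   changes such as relocating the vector table.  The variable names below are
   assumptions. */
static volatile uint32_t example_shared_value;
static volatile uint32_t example_shared_valid;

static void example_publish(uint32_t v)
{
  example_shared_value = v;           /* write the payload first               */
  __DMB();                            /* make the payload visible...           */
  example_shared_valid = 1;           /* ...before the "valid" flag is raised  */
}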
00110 
00111 /** \brief  Reverse byte order (32 bit)
00112 
00113     This function reverses the byte order in an integer value.
00114 
00115     \param [in]    value  Value to reverse
00116     \return               Reversed value
00117  */
00118 #define __REV                             __rev
00119 
00120 
00121 /** \brief  Reverse byte order (16 bit)
00122 
00123     This function reverses the byte order within each halfword of a 32-bit value.
00124 
00125     \param [in]    value  Value to reverse
00126     \return               Reversed value
00127  */
00128 #ifndef __NO_EMBEDDED_ASM
00129 __attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
00130 {
00131   rev16 r0, r0
00132   bx lr
00133 }
00134 #endif
00135 
00136 /** \brief  Reverse byte order in signed short value
00137 
00138     This function reverses the byte order in a signed short value with sign extension to integer.
00139 
00140     \param [in]    value  Value to reverse
00141     \return               Reversed value
00142  */
00143 #ifndef __NO_EMBEDDED_ASM
00144 __attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
00145 {
00146   revsh r0, r0
00147   bx lr
00148 }
00149 #endif
00150 
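/* Usage sketch (illustrative only): on the little-endian Cortex-M, __REV acts
   as an ntohl-style conversion for a 32-bit network-order word, and __REVSH
   does the same for a sign-extended 16-bit field.  The function names are
   assumptions. */
static uint32_t example_ntohl(uint32_t net_word)
{
  return __REV(net_word);             /* swap all four bytes                   */
}

static int32_t example_ntohs_signed(int32_t net_half)
{
  return __REVSH(net_half);           /* swap the low two bytes, sign-extend   */
}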
00151 
00152 /** \brief  Rotate Right in unsigned value (32 bit)
00153 
00154     This function rotates a 32-bit value right by a specified number of bits.
00155 
00156     \param [in]    value  Value to rotate
00157     \param [in]    shift  Number of bits to rotate
00158     \return               Rotated value
00159  */
00160 #define __ROR                             __ror
00161 
00162 
00163 /** \brief  Breakpoint
00164 
00165     This function causes the processor to enter Debug state.
00166     Debug tools can use this to investigate system state when the instruction at a particular address is reached.
00167 
00168     \param [in]    value  is ignored by the processor.
00169                    If required, a debugger can use it to store additional information about the breakpoint.
00170  */
00171 #define __BKPT(value)                       __breakpoint(value)
00172 
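/* Usage sketch (illustrative only): a debug-only assert that halts at the
   failing check when a debugger is attached; without a debugger the BKPT
   instruction escalates to a fault.  The macro and symbol names are
   assumptions. */
#ifdef EXAMPLE_DEBUG
  #define EXAMPLE_ASSERT(cond)        do { if (!(cond)) { __BKPT(0); } } while (0)
#else
  #define EXAMPLE_ASSERT(cond)        ((void)0)
#endif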
00173 
00174 #if       (__CORTEX_M >= 0x03)
00175 
00176 /** \brief  Reverse bit order of value
00177 
00178     This function reverses the bit order of the given value.
00179 
00180     \param [in]    value  Value to reverse
00181     \return               Reversed value
00182  */
00183 #define __RBIT                            __rbit
00184 
00185 
00186 /** \brief  LDR Exclusive (8 bit)
00187 
00188     This function performs an exclusive LDR command for an 8-bit value.
00189 
00190     \param [in]    ptr  Pointer to data
00191     \return             value of type uint8_t at (*ptr)
00192  */
00193 #define __LDREXB(ptr)                     ((uint8_t ) __ldrex(ptr))
00194 
00195 
00196 /** \brief  LDR Exclusive (16 bit)
00197 
00198     This function performs an exclusive LDR command for 16-bit values.
00199 
00200     \param [in]    ptr  Pointer to data
00201     \return        value of type uint16_t at (*ptr)
00202  */
00203 #define __LDREXH(ptr)                     ((uint16_t) __ldrex(ptr))
00204 
00205 
00206 /** \brief  LDR Exclusive (32 bit)
00207 
00208     This function performs an exclusive LDR command for 32-bit values.
00209 
00210     \param [in]    ptr  Pointer to data
00211     \return        value of type uint32_t at (*ptr)
00212  */
00213 #define __LDREXW(ptr)                     ((uint32_t ) __ldrex(ptr))
00214 
00215 
00216 /** \brief  STR Exclusive (8 bit)
00217 
00218     This function performs an exclusive STR command for 8-bit values.
00219 
00220     \param [in]  value  Value to store
00221     \param [in]    ptr  Pointer to location
00222     \return          0  Function succeeded
00223     \return          1  Function failed
00224  */
00225 #define __STREXB(value, ptr)              __strex(value, ptr)
00226 
00227 
00228 /** \brief  STR Exclusive (16 bit)
00229 
00230     This function performs an exclusive STR command for 16-bit values.
00231 
00232     \param [in]  value  Value to store
00233     \param [in]    ptr  Pointer to location
00234     \return          0  Function succeeded
00235     \return          1  Function failed
00236  */
00237 #define __STREXH(value, ptr)              __strex(value, ptr)
00238 
00239 
00240 /** \brief  STR Exclusive (32 bit)
00241 
00242     This function performs an exclusive STR command for 32-bit values.
00243 
00244     \param [in]  value  Value to store
00245     \param [in]    ptr  Pointer to location
00246     \return          0  Function succeeded
00247     \return          1  Function failed
00248  */
00249 #define __STREXW(value, ptr)              __strex(value, ptr)
00250 
00251 
00252 /** \brief  Remove the exclusive lock
00253 
00254     This function removes the exclusive lock which is created by LDREX.
00255 
00256  */
00257 #define __CLREX                           __clrex
00258 
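/* Usage sketch (illustrative only): the canonical exclusive-access pattern,
   an atomic increment.  The loop retries until the store-exclusive succeeds
   (returns 0); an interrupt or context switch in between simply forces
   another iteration.  The function name is an assumption. */
static uint32_t example_atomic_increment(volatile uint32_t *counter)
{
  uint32_t value;

  do {
    value = __LDREXW(counter) + 1;            /* load-exclusive, then modify        */
  } while (__STREXW(value, counter) != 0);    /* store-exclusive, retry on failure  */

  return value;
}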
00259 
00260 /** \brief  Signed Saturate
00261 
00262     This function saturates a signed value.
00263 
00264     \param [in]  value  Value to be saturated
00265     \param [in]    sat  Bit position to saturate to (1..32)
00266     \return             Saturated value
00267  */
00268 #define __SSAT                            __ssat
00269 
00270 
00271 /** \brief  Unsigned Saturate
00272 
00273     This function saturates an unsigned value.
00274 
00275     \param [in]  value  Value to be saturated
00276     \param [in]    sat  Bit position to saturate to (0..31)
00277     \return             Saturated value
00278  */
00279 #define __USAT                            __usat
00280 
00281 
00282 /** \brief  Count leading zeros
00283 
00284     This function counts the number of leading zeros of a data value.
00285 
00286     \param [in]  value  Value to count the leading zeros
00287     \return             number of leading zeros in value
00288  */
00289 #define __CLZ                             __clz
00290 
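/* Usage sketch (illustrative only): __USAT clamps a signed intermediate
   result into an unsigned range in one instruction (here 12 bits for a DAC),
   and __CLZ gives a fast integer log2.  The function names and the 12-bit
   width are assumptions; the saturation width must be a compile-time
   constant. */
static uint32_t example_clamp_to_dac(int32_t sample)
{
  return __USAT(sample, 12);          /* saturate into 0..4095                 */
}

static uint32_t example_ilog2(uint32_t x)
{
  return 31U - __CLZ(x);              /* x must be non-zero                    */
}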
00291 #endif /* (__CORTEX_M >= 0x03) */
00292 
00293 
00294 
00295 #elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
00296 /* IAR iccarm specific functions */
00297 
00298 #include <cmsis_iar.h>
00299 
00300 
00301 #elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
00302 /* TI CCS specific functions */
00303 
00304 #include <cmsis_ccs.h>
00305 
00306 
00307 #elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
00308 /* GNU gcc specific functions */
00309 
00310 /* Define macros for porting to both Thumb-1 and Thumb-2.
00311  * For Thumb-1, use a low register (r0-r7), specified by the constraint "l".
00312  * Otherwise, use general registers, specified by the constraint "r". */
00313 #if defined (__thumb__) && !defined (__thumb2__)
00314 #define __CMSIS_GCC_OUT_REG(r) "=l" (r)
00315 #define __CMSIS_GCC_USE_REG(r) "l" (r)
00316 #else
00317 #define __CMSIS_GCC_OUT_REG(r) "=r" (r)
00318 #define __CMSIS_GCC_USE_REG(r) "r" (r)
00319 #endif
00320 
00321 /** \brief  No Operation
00322 
00323     No Operation does nothing. This instruction can be used for code alignment purposes.
00324  */
00325 __attribute__( ( always_inline ) ) __STATIC_INLINE void __NOP(void)
00326 {
00327   __ASM volatile ("nop");
00328 }
00329 
00330 
00331 /** \brief  Wait For Interrupt
00332 
00333     Wait For Interrupt is a hint instruction that suspends execution
00334     until one of a number of events occurs.
00335  */
00336 __attribute__( ( always_inline ) ) __STATIC_INLINE void __WFI(void)
00337 {
00338   __ASM volatile ("wfi");
00339 }
00340 
00341 
00342 /** \brief  Wait For Event
00343 
00344     Wait For Event is a hint instruction that permits the processor to enter
00345     a low-power state until one of a number of events occurs.
00346  */
00347 __attribute__( ( always_inline ) ) __STATIC_INLINE void __WFE(void)
00348 {
00349   __ASM volatile ("wfe");
00350 }
00351 
00352 
00353 /** \brief  Send Event
00354 
00355     Send Event is a hint instruction. It causes an event to be signaled to the CPU.
00356  */
00357 __attribute__( ( always_inline ) ) __STATIC_INLINE void __SEV(void)
00358 {
00359   __ASM volatile ("sev");
00360 }
00361 
00362 
00363 /** \brief  Instruction Synchronization Barrier
00364 
00365     Instruction Synchronization Barrier flushes the pipeline in the processor,
00366     so that all instructions following the ISB are fetched from cache or
00367     memory, after the instruction has been completed.
00368  */
00369 __attribute__( ( always_inline ) ) __STATIC_INLINE void __ISB(void)
00370 {
00371   __ASM volatile ("isb");
00372 }
00373 
00374 
00375 /** \brief  Data Synchronization Barrier
00376 
00377     This function acts as a special kind of Data Memory Barrier.
00378     It completes when all explicit memory accesses before this instruction complete.
00379  */
00380 __attribute__( ( always_inline ) ) __STATIC_INLINE void __DSB(void)
00381 {
00382   __ASM volatile ("dsb");
00383 }
00384 
00385 
00386 /** \brief  Data Memory Barrier
00387 
00388     This function ensures the apparent order of the explicit memory operations before
00389     and after the instruction, without ensuring their completion.
00390  */
00391 __attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
00392 {
00393   __ASM volatile ("dmb");
00394 }
00395 
00396 
00397 /** \brief  Reverse byte order (32 bit)
00398 
00399     This function reverses the byte order in an integer value.
00400 
00401     \param [in]    value  Value to reverse
00402     \return               Reversed value
00403  */
00404 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
00405 {
00406 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
00407   return __builtin_bswap32(value);
00408 #else
00409   uint32_t result;
00410 
00411   __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
00412   return(result);
00413 #endif
00414 }
00415 
00416 
00417 /** \brief  Reverse byte order (16 bit)
00418 
00419     This function reverses the byte order within each halfword of a 32-bit value.
00420 
00421     \param [in]    value  Value to reverse
00422     \return               Reversed value
00423  */
00424 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t value)
00425 {
00426   uint32_t result;
00427 
00428   __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
00429   return(result);
00430 }
00431 
00432 
00433 /** \brief  Reverse byte order in signed short value
00434 
00435     This function reverses the byte order in a signed short value with sign extension to integer.
00436 
00437     \param [in]    value  Value to reverse
00438     \return               Reversed value
00439  */
00440 __attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
00441 {
00442 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
00443   return (short)__builtin_bswap16(value);
00444 #else
00445   uint32_t result;
00446 
00447   __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
00448   return(result);
00449 #endif
00450 }
00451 
00452 
00453 /** \brief  Rotate Right in unsigned value (32 bit)
00454 
00455     This function rotates a 32-bit value right by a specified number of bits.
00456 
00457     \param [in]    op1  Value to rotate
00458     \param [in]    op2  Number of bits to rotate
00459     \return               Rotated value
00460  */
00461 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
00462 {
00463   return (op1 >> op2) | (op1 << (32 - op2)); 
00464 }
00465 
00466 
00467 /** \brief  Breakpoint
00468 
00469     This function causes the processor to enter Debug state.
00470     Debug tools can use this to investigate system state when the instruction at a particular address is reached.
00471 
00472     \param [in]    value  is ignored by the processor.
00473                    If required, a debugger can use it to store additional information about the breakpoint.
00474  */
00475 #define __BKPT(value)                       __ASM volatile ("bkpt "#value)
00476 
00477 
00478 #if       (__CORTEX_M >= 0x03)
00479 
00480 /** \brief  Reverse bit order of value
00481 
00482     This function reverses the bit order of the given value.
00483 
00484     \param [in]    value  Value to reverse
00485     \return               Reversed value
00486  */
00487 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
00488 {
00489   uint32_t result;
00490 
00491    __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
00492    return(result);
00493 }
00494 
00495 
00496 /** \brief  LDR Exclusive (8 bit)
00497 
00498     This function performs an exclusive LDR command for an 8-bit value.
00499 
00500     \param [in]    ptr  Pointer to data
00501     \return             value of type uint8_t at (*ptr)
00502  */
00503 __attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
00504 {
00505     uint32_t result;
00506 
00507 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
00508    __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
00509 #else
00510     /* Prior to GCC 4.8, "Q" is expanded to [rx, #0], which is not
00511        accepted by the assembler, so the following less efficient
00512        pattern has to be used. */
00513    __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
00514 #endif
00515    return(result);
00516 }
00517 
00518 
00519 /** \brief  LDR Exclusive (16 bit)
00520 
00521     This function performs an exclusive LDR command for 16-bit values.
00522 
00523     \param [in]    ptr  Pointer to data
00524     \return        value of type uint16_t at (*ptr)
00525  */
00526 __attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
00527 {
00528     uint32_t result;
00529 
00530 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
00531    __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
00532 #else
00533     /* Prior to GCC 4.8, "Q" is expanded to [rx, #0], which is not
00534        accepted by the assembler, so the following less efficient
00535        pattern has to be used. */
00536    __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
00537 #endif
00538    return(result);
00539 }
00540 
00541 
00542 /** \brief  LDR Exclusive (32 bit)
00543 
00544     This function performs an exclusive LDR command for 32-bit values.
00545 
00546     \param [in]    ptr  Pointer to data
00547     \return        value of type uint32_t at (*ptr)
00548  */
00549 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
00550 {
00551     uint32_t result;
00552 
00553    __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
00554    return(result);
00555 }
00556 
00557 
00558 /** \brief  STR Exclusive (8 bit)
00559 
00560     This function performs an exclusive STR command for 8-bit values.
00561 
00562     \param [in]  value  Value to store
00563     \param [in]    ptr  Pointer to location
00564     \return          0  Function succeeded
00565     \return          1  Function failed
00566  */
00567 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
00568 {
00569    uint32_t result;
00570 
00571    __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
00572    return(result);
00573 }
00574 
00575 
00576 /** \brief  STR Exclusive (16 bit)
00577 
00578     This function performs an exclusive STR command for 16-bit values.
00579 
00580     \param [in]  value  Value to store
00581     \param [in]    ptr  Pointer to location
00582     \return          0  Function succeeded
00583     \return          1  Function failed
00584  */
00585 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
00586 {
00587    uint32_t result;
00588 
00589    __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
00590    return(result);
00591 }
00592 
00593 
00594 /** \brief  STR Exclusive (32 bit)
00595 
00596     This function performs an exclusive STR command for 32-bit values.
00597 
00598     \param [in]  value  Value to store
00599     \param [in]    ptr  Pointer to location
00600     \return          0  Function succeeded
00601     \return          1  Function failed
00602  */
00603 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
00604 {
00605    uint32_t result;
00606 
00607    __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
00608    return(result);
00609 }
00610 
00611 
00612 /** \brief  Remove the exclusive lock
00613 
00614     This function removes the exclusive lock which is created by LDREX.
00615 
00616  */
00617 __attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
00618 {
00619   __ASM volatile ("clrex" ::: "memory");
00620 }
00621 
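/* Usage sketch (illustrative only): a compare-and-swap built from the
   exclusives above, usable as the basis of a try-lock.  It may fail
   spuriously, so callers typically retry.  __CLREX drops the exclusive
   monitor on the early-out path.  The function name is an assumption. */
static uint32_t example_compare_and_swap(volatile uint32_t *addr,
                                         uint32_t expected, uint32_t desired)
{
  if (__LDREXW(addr) != expected) {
    __CLREX();                        /* give up the exclusive monitor         */
    return 0U;
  }
  return (__STREXW(desired, addr) == 0U) ? 1U : 0U;
}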
00622 
00623 /** \brief  Signed Saturate
00624 
00625     This function saturates a signed value.
00626 
00627     \param [in]  value  Value to be saturated
00628     \param [in]    sat  Bit position to saturate to (1..32)
00629     \return             Saturated value
00630  */
00631 #define __SSAT(ARG1,ARG2) \
00632 ({                          \
00633   uint32_t __RES, __ARG1 = (ARG1); \
00634   __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
00635   __RES; \
00636  })
00637 
00638 
00639 /** \brief  Unsigned Saturate
00640 
00641     This function saturates an unsigned value.
00642 
00643     \param [in]  value  Value to be saturated
00644     \param [in]    sat  Bit position to saturate to (0..31)
00645     \return             Saturated value
00646  */
00647 #define __USAT(ARG1,ARG2) \
00648 ({                          \
00649   uint32_t __RES, __ARG1 = (ARG1); \
00650   __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
00651   __RES; \
00652  })
00653 
00654 
00655 /** \brief  Count leading zeros
00656 
00657     This function counts the number of leading zeros of a data value.
00658 
00659     \param [in]  value  Value to count the leading zeros
00660     \return             number of leading zeros in value
00661  */
00662 __attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
00663 {
00664    uint32_t result;
00665 
00666   __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
00667   return(result);
00668 }
00669 
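/* Usage sketch (illustrative only): rounding a size up to the next power of
   two with __CLZ.  Sizes of 0 or 1 and sizes above 2^31 are handled
   separately so the shift stays in range.  The function name is an
   assumption. */
static uint32_t example_next_pow2(uint32_t size)
{
  if (size <= 1U) {
    return 1U;
  }
  if (size > 0x80000000U) {
    return 0U;                        /* does not fit in 32 bits               */
  }
  return 1U << (32U - __CLZ(size - 1U));
}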
00670 #endif /* (__CORTEX_M >= 0x03) */
00671 
00672 
00673 
00674 
00675 #elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
00676 /* TASKING carm specific functions */
00677 
00678 /*
00679  * The CMSIS functions have been implemented as intrinsics in the compiler.
00680  * Please use "carm -?i" to get an up-to-date list of all intrinsics,
00681  * including the CMSIS ones.
00682  */
00683 
00684 #endif
00685 
00686 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
00687 
00688 #endif /* __CORE_CMINSTR_H */