meh

Fork of mbed by mbed official

Committer: emilmont
Date:      Tue Feb 18 15:02:39 2014 +0000
Revision:  78:ed8466a608b4
Child:     110:165afa46840b

Add KL05Z Target
Fix LPC11XX InterruptIn
Fix NUCLEO boards us_ticker
Fix NUCLEO_L152RE AnalogOut

Who changed what in which revision?

Every line of this file was last modified by emilmont in revision 78:ed8466a608b4.
/**************************************************************************//**
 * @file     core_cmInstr.h
 * @brief    CMSIS Cortex-M Core Instruction Access Header File
 * @version  V3.20
 * @date     05. March 2013
 *
 * @note
 *
 ******************************************************************************/
/* Copyright (c) 2009 - 2013 ARM LIMITED

   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.
   *
   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/


#ifndef __CORE_CMINSTR_H
#define __CORE_CMINSTR_H


/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

#if   defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
/* ARM armcc specific functions */

#if (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif


/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP                             __nop


/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
#define __WFI                             __wfi


/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
#define __WFE                             __wfe


/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV                             __sev
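/* Usage sketch (not part of the original header): a typical WFE/SEV
   pairing, where a background loop sleeps until an interrupt handler
   signals it. The flag and handler names are hypothetical.

   volatile uint32_t data_ready = 0;

   void EXTI0_IRQHandler(void)        // hypothetical interrupt handler
   {
       data_ready = 1;                // publish the result...
       __SEV();                       // ...and signal any core waiting in WFE
   }

   void wait_for_data(void)
   {
       while (!data_ready) {
           __WFE();                   // sleep until an event or interrupt occurs
       }
   }
*/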


/** \brief  Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
#define __ISB()                           __isb(0xF)


/** \brief  Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB()                           __dsb(0xF)


/** \brief  Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
#define __DMB()                           __dmb(0xF)
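/* Usage sketch (not part of the original header): a common place these
   barriers appear is after a write that changes how later instructions
   execute or fetch, e.g. relocating the vector table. SCB->VTOR comes
   from the CMSIS core peripheral headers, not this file, and the table
   symbol is hypothetical.

   SCB->VTOR = (uint32_t)&vector_table_in_ram;  // hypothetical new table
   __DSB();   // ensure the register write has completed
   __ISB();   // flush the pipeline so the new setting takes effect
*/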


/** \brief  Reverse byte order (32 bit)

    This function reverses the byte order in an integer value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#define __REV                             __rev


/** \brief  Reverse byte order (16 bit)

    This function reverses the byte order within each halfword of a word.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}
#endif

/** \brief  Reverse byte order in signed short value

    This function reverses the byte order in a signed short value with sign extension to integer.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
{
  revsh r0, r0
  bx lr
}
#endif


/** \brief  Rotate Right in unsigned value (32 bit)

    This function rotates an unsigned value right by a specified number of bits.

    \param [in]    value  Value to rotate
    \param [in]    shift  Number of bits to rotate
    \return               Rotated value
 */
#define __ROR                             __ror


/** \brief  Breakpoint

    This function causes the processor to enter Debug state.
    Debug tools can use this to investigate system state when the instruction at a particular address is reached.

    \param [in]    value  is ignored by the processor.
                   If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                     __breakpoint(value)


#if       (__CORTEX_M >= 0x03)

/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#define __RBIT                            __rbit


/** \brief  LDR Exclusive (8 bit)

    This function performs an exclusive LDR for 8 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
#define __LDREXB(ptr)                     ((uint8_t ) __ldrex(ptr))


/** \brief  LDR Exclusive (16 bit)

    This function performs an exclusive LDR for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint16_t at (*ptr)
 */
#define __LDREXH(ptr)                     ((uint16_t) __ldrex(ptr))


/** \brief  LDR Exclusive (32 bit)

    This function performs an exclusive LDR for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint32_t at (*ptr)
 */
#define __LDREXW(ptr)                     ((uint32_t ) __ldrex(ptr))


/** \brief  STR Exclusive (8 bit)

    This function performs an exclusive STR for 8 bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXB(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (16 bit)

    This function performs an exclusive STR for 16 bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXH(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (32 bit)

    This function performs an exclusive STR for 32 bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXW(value, ptr)              __strex(value, ptr)


/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.

 */
#define __CLREX                           __clrex
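/* Usage sketch (not part of the original header): a lock-free atomic
   increment built from the exclusive-access intrinsics above. The loop
   retries until the store succeeds, i.e. until no other access broke
   the exclusive monitor between the load and the store.

   static inline void atomic_increment(volatile uint32_t *counter)
   {
       uint32_t v;
       do {
           v = __LDREXW(counter);            // load and mark exclusive
       } while (__STREXW(v + 1u, counter));  // nonzero result means retry
   }
*/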


/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (1..32)
    \return             Saturated value
 */
#define __SSAT                            __ssat


/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (0..31)
    \return             Saturated value
 */
#define __USAT                            __usat


/** \brief  Count leading zeros

    This function counts the number of leading zeros of a data value.

    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
#define __CLZ                             __clz
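/* Usage sketch (not part of the original header): __CLZ gives a cheap
   integer log2 / highest-set-bit, a common building block for bitmap
   schedulers and allocators. The helper name is hypothetical.

   static inline uint32_t highest_set_bit(uint32_t x)  // caller must ensure x != 0
   {
       return 31u - __CLZ(x);   // e.g. x = 0x80 -> __CLZ = 24 -> bit 7
   }
*/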

#endif /* (__CORTEX_M >= 0x03) */



#elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
/* IAR iccarm specific functions */

#include <cmsis_iar.h>


#elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
/* TI CCS specific functions */

#include <cmsis_ccs.h>


#elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use a low register (r0-r7), specified by constraint "l".
 * Otherwise, use general registers, specified by constraint "r". */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __NOP(void)
{
  __ASM volatile ("nop");
}


/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFI(void)
{
  __ASM volatile ("wfi");
}


/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFE(void)
{
  __ASM volatile ("wfe");
}


/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __SEV(void)
{
  __ASM volatile ("sev");
}


/** \brief  Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __ISB(void)
{
  __ASM volatile ("isb");
}


/** \brief  Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __DSB(void)
{
  __ASM volatile ("dsb");
}


/** \brief  Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
{
  __ASM volatile ("dmb");
}


/** \brief  Reverse byte order (32 bit)

    This function reverses the byte order in an integer value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}
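/* Usage sketch (not part of the original header): __REV is the usual way
   to convert between the Cortex-M's little-endian layout and big-endian
   ("network") byte order. The helper name is hypothetical.

   static inline uint32_t to_network_order(uint32_t host_value)
   {
       return __REV(host_value);   // 0x11223344 -> 0x44332211
   }
*/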


/** \brief  Reverse byte order (16 bit)

    This function reverses the byte order within each halfword of a word.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/** \brief  Reverse byte order in signed short value

    This function reverses the byte order in a signed short value with sign extension to integer.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (short)__builtin_bswap16(value);
#else
  uint32_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}


/** \brief  Rotate Right in unsigned value (32 bit)

    This function rotates an unsigned value right by a specified number of bits.

    \param [in]    op1  Value to rotate
    \param [in]    op2  Number of bits to rotate
    \return             Rotated value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  return (op1 >> op2) | (op1 << (32 - op2));
}
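/* Note (not in the original source): this C fallback relies on the shift
   amounts staying in range. For op2 == 0 the subexpression (op1 << 32)
   is undefined behavior in C, so callers should keep op2 within 1..31,
   or mask and special-case it first:

   uint32_t rotated = (op2 & 31u) ? __ROR(op1, op2 & 31u) : op1;
*/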


/** \brief  Breakpoint

    This function causes the processor to enter Debug state.
    Debug tools can use this to investigate system state when the instruction at a particular address is reached.

    \param [in]    value  is ignored by the processor.
                   If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                       __ASM volatile ("bkpt "#value)
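/* Note (not in the original source): because this macro pastes its
   argument into the assembly string with the # stringize operator,
   value must be an integer literal, e.g. __BKPT(0); passing a runtime
   variable will not assemble.
*/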


#if       (__CORTEX_M >= 0x03)

/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}


/** \brief  LDR Exclusive (8 bit)

    This function performs an exclusive LDR for 8 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" is expanded to [rx, #0], which is not accepted
     by the assembler, so the following less efficient pattern has to be
     used instead. */
  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return(result);
}


/** \brief  LDR Exclusive (16 bit)

    This function performs an exclusive LDR for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint16_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" is expanded to [rx, #0], which is not accepted
     by the assembler, so the following less efficient pattern has to be
     used instead. */
  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return(result);
}


/** \brief  LDR Exclusive (32 bit)

    This function performs an exclusive LDR for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint32_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}


/** \brief  STR Exclusive (8 bit)

    This function performs an exclusive STR for 8 bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}


/** \brief  STR Exclusive (16 bit)

    This function performs an exclusive STR for 16 bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}


/** \brief  STR Exclusive (32 bit)

    This function performs an exclusive STR for 32 bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}


/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.

 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}
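/* Usage sketch (not part of the original header): a minimal spinlock
   built from the exclusive-access functions above, with __DMB providing
   the ordering and __CLREX clearing the monitor on the retry path. The
   lock layout and names are hypothetical.

   static inline void spin_lock(volatile uint32_t *lock)
   {
       while (1) {
           if (__LDREXW(lock) == 0u && __STREXW(1u, lock) == 0u) {
               __DMB();      // lock taken: order later accesses after it
               return;
           }
           __CLREX();        // drop the reservation before retrying
       }
   }

   static inline void spin_unlock(volatile uint32_t *lock)
   {
       __DMB();              // order earlier accesses before the release
       *lock = 0u;
   }
*/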


/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (1..32)
    \return             Saturated value
 */
#define __SSAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (0..31)
    \return             Saturated value
 */
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
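/* Usage sketch (not part of the original header): saturation is the usual
   way to clamp DSP results without branches, e.g. narrowing a 32-bit
   accumulator to a signed 16-bit PCM sample. Note the bit count must be
   a constant expression, since it is encoded into the instruction.

   int32_t accumulator = 40000;                        // above int16_t range
   int16_t sample = (int16_t)__SSAT(accumulator, 16);  // clamped to 32767
*/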


/** \brief  Count leading zeros

    This function counts the number of leading zeros of a data value.

    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}

#endif /* (__CORTEX_M >= 0x03) */




#elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
/* TASKING carm specific functions */

/*
 * The CMSIS functions have been implemented as intrinsics in the compiler.
 * Please use "carm -?i" to get an up-to-date list of all intrinsics,
 * including the CMSIS ones.
 */

#endif

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */

#endif /* __CORE_CMINSTR_H */