The official Mbed 2 C/C++ SDK provides the software platform and libraries to build your applications.

Dependents:   hello SerialTestv11 SerialTestv12 Sierpinski ... more

mbed 2

This is the mbed 2 library. If you'd like to learn about Mbed OS, please see the mbed-os docs.
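For orientation, a typical mbed 2 program built against this SDK looks like the minimal sketch below. This is a hedged example, not part of this repository, and it assumes a target that defines the LED1 pin:

#include "mbed.h"

DigitalOut led(LED1);      // on-board LED, assuming the target maps LED1

int main() {
    while (true) {
        led = !led;        // toggle the LED
        wait(0.5);         // mbed 2 blocking delay, in seconds
    }
}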

Committer: emilmont
Date: Fri Feb 21 12:21:39 2014 +0000
Revision: 80:8e73be2a2ac1
Commit message: First alpha release for the NRF51822 target (to be tested in the online IDE)

File contents of core_cm4_simd.h at this revision:
/**************************************************************************//**
 * @file     core_cm4_simd.h
 * @brief    CMSIS Cortex-M4 SIMD Header File
 * @version  V3.20
 * @date     25. February 2013
 *
 * @note
 *
 ******************************************************************************/
/* Copyright (c) 2009 - 2013 ARM LIMITED

   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.
   *
   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/


#ifdef __cplusplus
 extern "C" {
#endif

#ifndef __CORE_CM4_SIMD_H
#define __CORE_CM4_SIMD_H


/*******************************************************************************
 *                Hardware Abstraction Layer
 ******************************************************************************/


/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
*/

#if defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
/* ARM armcc specific functions */

/*------ CM4 SIMD Intrinsics -----------------------------------------------------*/
#define __SADD8                 __sadd8
#define __QADD8                 __qadd8
#define __SHADD8                __shadd8
#define __UADD8                 __uadd8
#define __UQADD8                __uqadd8
#define __UHADD8                __uhadd8
#define __SSUB8                 __ssub8
#define __QSUB8                 __qsub8
#define __SHSUB8                __shsub8
#define __USUB8                 __usub8
#define __UQSUB8                __uqsub8
#define __UHSUB8                __uhsub8
#define __SADD16                __sadd16
#define __QADD16                __qadd16
#define __SHADD16               __shadd16
#define __UADD16                __uadd16
#define __UQADD16               __uqadd16
#define __UHADD16               __uhadd16
#define __SSUB16                __ssub16
#define __QSUB16                __qsub16
#define __SHSUB16               __shsub16
#define __USUB16                __usub16
#define __UQSUB16               __uqsub16
#define __UHSUB16               __uhsub16
#define __SASX                  __sasx
#define __QASX                  __qasx
#define __SHASX                 __shasx
#define __UASX                  __uasx
#define __UQASX                 __uqasx
#define __UHASX                 __uhasx
#define __SSAX                  __ssax
#define __QSAX                  __qsax
#define __SHSAX                 __shsax
#define __USAX                  __usax
#define __UQSAX                 __uqsax
#define __UHSAX                 __uhsax
#define __USAD8                 __usad8
#define __USADA8                __usada8
#define __SSAT16                __ssat16
#define __USAT16                __usat16
#define __UXTB16                __uxtb16
#define __UXTAB16               __uxtab16
#define __SXTB16                __sxtb16
#define __SXTAB16               __sxtab16
#define __SMUAD                 __smuad
#define __SMUADX                __smuadx
#define __SMLAD                 __smlad
#define __SMLADX                __smladx
#define __SMLALD                __smlald
#define __SMLALDX               __smlaldx
#define __SMUSD                 __smusd
#define __SMUSDX                __smusdx
#define __SMLSD                 __smlsd
#define __SMLSDX                __smlsdx
#define __SMLSLD                __smlsld
#define __SMLSLDX               __smlsldx
#define __SEL                   __sel
#define __QADD                  __qadd
#define __QSUB                  __qsub

#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )

#define __SMMLA(ARG1,ARG2,ARG3)          ( (int32_t)((((int64_t)(ARG1) * (ARG2)) + \
                                                      ((int64_t)(ARG3) << 32)      ) >> 32))

/*-- End CM4 SIMD Intrinsics -----------------------------------------------------*/



#elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
/* IAR iccarm specific functions */

/*------ CM4 SIMD Intrinsics -----------------------------------------------------*/
#include <cmsis_iar.h>

/*-- End CM4 SIMD Intrinsics -----------------------------------------------------*/



#elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
/* TI CCS specific functions */

/*------ CM4 SIMD Intrinsics -----------------------------------------------------*/
#include <cmsis_ccs.h>

/*-- End CM4 SIMD Intrinsics -----------------------------------------------------*/



#elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

/*------ CM4 SIMD Intrinsics -----------------------------------------------------*/
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#define __SSAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#define __SMLALD(ARG1,ARG2,ARG3) \
({ \
  uint32_t __ARG1 = (ARG1), __ARG2 = (ARG2), __ARG3_H = (uint32_t)((uint64_t)(ARG3) >> 32), __ARG3_L = (uint32_t)((uint64_t)(ARG3) & 0xFFFFFFFFUL); \
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (__ARG3_L), "=r" (__ARG3_H) : "r" (__ARG1), "r" (__ARG2), "0" (__ARG3_L), "1" (__ARG3_H) ); \
  (uint64_t)(((uint64_t)__ARG3_H << 32) | __ARG3_L); \
 })

#define __SMLALDX(ARG1,ARG2,ARG3) \
({ \
  uint32_t __ARG1 = (ARG1), __ARG2 = (ARG2), __ARG3_H = (uint32_t)((uint64_t)(ARG3) >> 32), __ARG3_L = (uint32_t)((uint64_t)(ARG3) & 0xFFFFFFFFUL); \
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (__ARG3_L), "=r" (__ARG3_H) : "r" (__ARG1), "r" (__ARG2), "0" (__ARG3_L), "1" (__ARG3_H) ); \
  (uint64_t)(((uint64_t)__ARG3_H << 32) | __ARG3_L); \
 })

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#define __SMLSLD(ARG1,ARG2,ARG3) \
({ \
  uint32_t __ARG1 = (ARG1), __ARG2 = (ARG2), __ARG3_H = (uint32_t)((ARG3) >> 32), __ARG3_L = (uint32_t)((ARG3) & 0xFFFFFFFFUL); \
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (__ARG3_L), "=r" (__ARG3_H) : "r" (__ARG1), "r" (__ARG2), "0" (__ARG3_L), "1" (__ARG3_H) ); \
  (uint64_t)(((uint64_t)__ARG3_H << 32) | __ARG3_L); \
 })

#define __SMLSLDX(ARG1,ARG2,ARG3) \
({ \
  uint32_t __ARG1 = (ARG1), __ARG2 = (ARG2), __ARG3_H = (uint32_t)((ARG3) >> 32), __ARG3_L = (uint32_t)((ARG3) & 0xFFFFFFFFUL); \
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (__ARG3_L), "=r" (__ARG3_H) : "r" (__ARG1), "r" (__ARG2), "0" (__ARG3_L), "1" (__ARG3_H) ); \
  (uint64_t)(((uint64_t)__ARG3_H << 32) | __ARG3_L); \
 })

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/*-- End CM4 SIMD Intrinsics -----------------------------------------------------*/



#elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
/* TASKING carm specific functions */


/*------ CM4 SIMD Intrinsics -----------------------------------------------------*/
/* not yet supported */
/*-- End CM4 SIMD Intrinsics -----------------------------------------------------*/


#endif

/*@} end of group CMSIS_SIMD_intrinsics */


#endif /* __CORE_CM4_SIMD_H */

#ifdef __cplusplus
 }
#endif
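
For context, the inline functions and macros above wrap the Cortex-M4 DSP-extension instructions one-to-one. The sketch below is a hypothetical usage example, not part of this header; it assumes the target's CMSIS header chain (which pulls in core_cm4_simd.h) is available via "cmsis.h", and uses __USADA8 to accumulate a sum of absolute differences four bytes at a time.

#include <stdint.h>
#include <string.h>
#include "cmsis.h"   // assumption: mbed target header chain that includes core_cm4_simd.h

// Hypothetical helper: sum of absolute byte differences between two buffers.
// len must be a multiple of 4; each __USADA8 call handles four byte lanes.
static uint32_t sad_bytes(const uint8_t *a, const uint8_t *b, uint32_t len)
{
    uint32_t acc = 0;
    for (uint32_t i = 0; i < len; i += 4) {
        uint32_t wa, wb;
        memcpy(&wa, &a[i], 4);           // pack four bytes into one 32-bit word
        memcpy(&wb, &b[i], 4);
        acc = __USADA8(wa, wb, acc);     // acc += |a0-b0| + |a1-b1| + |a2-b2| + |a3-b3|
    }
    return acc;
}

On a Cortex-M4 this replaces four separate subtract/absolute/add sequences per word with a single instruction, which is the point of exposing these intrinsics in the HAL.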