/**************************************************************************//**
 * @file     core_cmInstr.h
 * @brief    CMSIS Cortex-M Core Instruction Access Header File
 * @version  V3.20
 * @date     05. March 2013
 *
 * @note
 *
 ******************************************************************************/
/* Copyright (c) 2009 - 2013 ARM LIMITED

   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.
   *
   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/


#ifndef __CORE_CMINSTR_H
#define __CORE_CMINSTR_H


/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

#if   defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
/* ARM armcc specific functions */

#if (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif


/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP                             __nop


/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
#define __WFI                             __wfi


/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
#define __WFE                             __wfe


/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV                             __sev
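
/* Usage sketch (illustrative, not part of the original header): __WFE and
   __SEV can implement a simple low-power wait. "flag", set from an interrupt
   handler or another context, is a hypothetical variable.

     volatile uint32_t flag = 0;

     while (flag == 0U) {
       __WFE();                 // enter low-power state until an event occurs
     }
*/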


/** \brief  Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
#define __ISB()                           __isb(0xF)


/** \brief  Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB()                           __dsb(0xF)


/** \brief  Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
#define __DMB()                           __dmb(0xF)
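
/* Usage sketch (illustrative): a typical producer pattern where __DMB
   enforces the order of two memory writes. "shared_buf" and "buf_ready"
   are hypothetical variables observed by an interrupt handler.

     shared_buf[0] = 42U;       // write the payload first
     __DMB();                   // order the payload write before the flag write
     buf_ready = 1U;            // then publish it
*/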


/** \brief  Reverse byte order (32 bit)

    This function reverses the byte order in an integer value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#define __REV                             __rev


/** \brief  Reverse byte order (16 bit)

    This function reverses the byte order in two unsigned short values.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}
#endif

/** \brief  Reverse byte order in signed short value

    This function reverses the byte order in a signed short value with sign extension to integer.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
{
  revsh r0, r0
  bx lr
}
#endif
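
/* Usage sketch (illustrative): expected results of the byte-reverse
   intrinsics above, e.g. when converting between little- and big-endian data.

     uint32_t a = __REV  (0x12345678UL);   // a == 0x78563412 (all four bytes)
     uint32_t b = __REV16(0x12345678UL);   // b == 0x34127856 (bytes within each halfword)
     int32_t  c = __REVSH(0x00000080L);    // c == (int32_t)0xFFFF8000 (low halfword, sign-extended)
*/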


/** \brief  Rotate Right in unsigned value (32 bit)

    This function rotates a 32-bit value right by the specified number of bits.

    \param [in]    value  Value to rotate
    \param [in]    shift  Number of bits to rotate
    \return               Rotated value
 */
#define __ROR                             __ror
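
/* Usage sketch (illustrative):

     uint32_t r = __ROR(0x12345678UL, 8);  // r == 0x78123456
*/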


/** \brief  Breakpoint

    This function causes the processor to enter Debug state.
    Debug tools can use this to investigate system state when the instruction at a particular address is reached.

    \param [in]    value  is ignored by the processor.
                   If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                       __breakpoint(value)
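
/* Usage sketch (illustrative): halt at a fatal error so an attached debugger
   can inspect the system state. "fatal_error" is a hypothetical condition.

     if (fatal_error) {
       __BKPT(0);               // the immediate value is free for debugger use
     }
*/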


#if       (__CORTEX_M >= 0x03)

/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#define __RBIT                            __rbit
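
/* Usage sketch (illustrative):

     uint32_t m = __RBIT(0x00000001UL);    // m == 0x80000000 (bit 0 moves to bit 31)
*/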


/** \brief  LDR Exclusive (8 bit)

    This function performs an exclusive LDR command for an 8-bit value.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
#define __LDREXB(ptr)                     ((uint8_t ) __ldrex(ptr))


/** \brief  LDR Exclusive (16 bit)

    This function performs an exclusive LDR command for 16-bit values.

    \param [in]    ptr  Pointer to data
    \return        value of type uint16_t at (*ptr)
 */
#define __LDREXH(ptr)                     ((uint16_t) __ldrex(ptr))


/** \brief  LDR Exclusive (32 bit)

    This function performs an exclusive LDR command for 32-bit values.

    \param [in]    ptr  Pointer to data
    \return        value of type uint32_t at (*ptr)
 */
#define __LDREXW(ptr)                     ((uint32_t ) __ldrex(ptr))


/** \brief  STR Exclusive (8 bit)

    This function performs an exclusive STR command for 8-bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXB(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (16 bit)

    This function performs an exclusive STR command for 16-bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXH(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (32 bit)

    This function performs an exclusive STR command for 32-bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXW(value, ptr)              __strex(value, ptr)
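
/* Usage sketch (illustrative; atomic_inc32 is a hypothetical helper, not
   part of CMSIS): the exclusive access pair implements a lock-free
   read-modify-write that is retried until no other access intervened.

     static uint32_t atomic_inc32(volatile uint32_t *addr)
     {
       uint32_t val;
       do {
         val = __LDREXW(addr) + 1U;     // exclusive read, then modify
       } while (__STREXW(val, addr));   // 0 = stored OK, 1 = lost exclusivity, retry
       return val;
     }
*/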


/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.

 */
#define __CLREX                           __clrex


/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (1..32)
    \return             Saturated value
 */
#define __SSAT                            __ssat


/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (0..31)
    \return             Saturated value
 */
#define __USAT                            __usat
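
/* Usage sketch (illustrative): clamping results to 8-bit ranges.

     int32_t  s = __SSAT(200, 8);    // s == 127, the largest signed 8-bit value
     uint32_t u = __USAT(300, 8);    // u == 255, the largest unsigned 8-bit value
*/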


/** \brief  Count leading zeros

    This function counts the number of leading zeros of a data value.

    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
#define __CLZ                             __clz

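/* Usage sketch (illustrative): the leading-zero count gives the position of
   the highest set bit, i.e. an integer log2. "x" is a hypothetical variable.

     uint32_t log2v = 31U - __CLZ(x);      // only meaningful for x != 0; __CLZ(0) == 32
*/
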
#endif /* (__CORTEX_M >= 0x03) */



#elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
/* IAR iccarm specific functions */

#include <cmsis_iar.h>


#elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
/* TI CCS specific functions */

#include <cmsis_ccs.h>


#elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low registers (r0-r7), specified by the constraint "l".
 * Otherwise, use general registers, specified by the constraint "r". */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __NOP(void)
{
  __ASM volatile ("nop");
}


/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFI(void)
{
  __ASM volatile ("wfi");
}


/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFE(void)
{
  __ASM volatile ("wfe");
}


/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __SEV(void)
{
  __ASM volatile ("sev");
}


/** \brief  Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __ISB(void)
{
  __ASM volatile ("isb");
}


/** \brief  Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __DSB(void)
{
  __ASM volatile ("dsb");
}


/** \brief  Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
{
  __ASM volatile ("dmb");
}


/** \brief  Reverse byte order (32 bit)

    This function reverses the byte order in an integer value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}


/** \brief  Reverse byte order (16 bit)

    This function reverses the byte order in two unsigned short values.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/** \brief  Reverse byte order in signed short value

    This function reverses the byte order in a signed short value with sign extension to integer.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (short)__builtin_bswap16(value);
#else
  uint32_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}


/** \brief  Rotate Right in unsigned value (32 bit)

    This function rotates a 32-bit value right by the specified number of bits.

    \param [in]    op1  Value to rotate
    \param [in]    op2  Number of bits to rotate
    \return             Rotated value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U) {
    return op1;           /* avoid the undefined 32-bit shift below */
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}


/** \brief  Breakpoint

    This function causes the processor to enter Debug state.
    Debug tools can use this to investigate system state when the instruction at a particular address is reached.

    \param [in]    value  is ignored by the processor.
                   If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                       __ASM volatile ("bkpt "#value)


#if       (__CORTEX_M >= 0x03)

/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

   __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
   return(result);
}


/** \brief  LDR Exclusive (8 bit)

    This function performs an exclusive LDR command for an 8-bit value.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by the assembler, so the following less efficient pattern
       has to be used instead.
    */
   __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return(result);
}


/** \brief  LDR Exclusive (16 bit)

    This function performs an exclusive LDR command for 16-bit values.

    \param [in]    ptr  Pointer to data
    \return        value of type uint16_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by the assembler, so the following less efficient pattern
       has to be used instead.
    */
   __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return(result);
}


/** \brief  LDR Exclusive (32 bit)

    This function performs an exclusive LDR command for 32-bit values.

    \param [in]    ptr  Pointer to data
    \return        value of type uint32_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
    uint32_t result;

   __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
   return(result);
}


/** \brief  STR Exclusive (8 bit)

    This function performs an exclusive STR command for 8-bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
   uint32_t result;

   __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
   return(result);
}


/** \brief  STR Exclusive (16 bit)

    This function performs an exclusive STR command for 16-bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
   uint32_t result;

   __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
   return(result);
}


/** \brief  STR Exclusive (32 bit)

    This function performs an exclusive STR command for 32-bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
   uint32_t result;

   __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
   return(result);
}


/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.

 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}
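
/* Usage sketch (illustrative; try_lock is a hypothetical helper, not part of
   CMSIS): __CLREX clears the exclusive monitor when an exclusive sequence is
   abandoned without a matching store.

     static uint32_t try_lock(volatile uint32_t *lock)
     {
       if (__LDREXW(lock) != 0U) {
         __CLREX();                        // give up: drop the exclusive state
         return 0U;                        // lock already held
       }
       return (__STREXW(1U, lock) == 0U);  // 1 on success, 0 if we lost the race
     }
*/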


/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (1..32)
    \return             Saturated value
 */
#define __SSAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (0..31)
    \return             Saturated value
 */
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/** \brief  Count leading zeros

    This function counts the number of leading zeros of a data value.

    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
{
   uint32_t result;

  __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}

#endif /* (__CORTEX_M >= 0x03) */




#elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
/* TASKING carm specific functions */

/*
 * The CMSIS functions have been implemented as intrinsics in the compiler.
 * Please use "carm -?i" to get an up-to-date list of all intrinsics,
 * including the CMSIS ones.
 */

#endif

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */

#endif /* __CORE_CMINSTR_H */