/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef ASM_MACROS_S
#define ASM_MACROS_S

#include <arch.h>
#include <common/asm_macros_common.S>
#include <lib/spinlock.h>

/*
 * TLBI instruction with type specifier that implements the workaround for
 * erratum 813419 of Cortex-A57 or erratum 1286807 of Cortex-A76.
 */
#if ERRATA_A57_813419 || ERRATA_A76_1286807
#define TLB_INVALIDATE(_type) \
	tlbi	_type; \
	dsb	ish; \
	tlbi	_type
#else
#define TLB_INVALIDATE(_type) \
	tlbi	_type
#endif
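
/*
 * Illustrative use (a sketch, not code from this file): invalidating all
 * stage 1 EL1 TLB entries picks up the erratum workaround automatically
 * when the corresponding build flag is set:
 *
 *	TLB_INVALIDATE(vmalle1)
 *	dsb	ish
 *	isb
 */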


	.macro	func_prologue
	stp	x29, x30, [sp, #-0x10]!
	mov	x29, sp
	.endm

	.macro	func_epilogue
	ldp	x29, x30, [sp], #0x10
	.endm


	/*
	 * Compute the minimum data cache line size, in bytes, from
	 * CTR_EL0.DminLine (log2 of the line size in words).
	 */
	.macro	dcache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	ubfx	\tmp, \tmp, #16, #4
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm
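
	/*
	 * Sketch of typical use (hypothetical, not this file's code): walk the
	 * range [x0, x1) by data cache lines and clean+invalidate each line:
	 *
	 *	dcache_line_size x2, x3
	 *	sub	x3, x2, #1
	 *	bic	x0, x0, x3
	 * 1:	dc	civac, x0
	 *	add	x0, x0, x2
	 *	cmp	x0, x1
	 *	b.lo	1b
	 *	dsb	sy
	 */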


	/*
	 * Compute the minimum instruction cache line size, in bytes, from
	 * CTR_EL0.IminLine (log2 of the line size in words).
	 */
	.macro	icache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	and	\tmp, \tmp, #0xf
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm


	/*
	 * Branch to the given label unless the exception being handled was
	 * triggered by an AArch64 SMC.
	 */
	.macro	smc_check  label
	mrs	x0, esr_el3
	ubfx	x0, x0, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x0, #EC_AARCH64_SMC
	b.ne	\label
	.endm

	/*
	 * Declare the exception vector table, enforcing it is aligned on a
	 * 2KB boundary, as required by the ARMv8 architecture.
	 * Use zero bytes as the fill value for the padding so that it decodes
	 * to illegal AArch64 instructions. This increases security, robustness
	 * and potentially facilitates debugging.
	 */
	.macro vector_base  label, section_name=.vectors
	.section \section_name, "ax"
	.align 11, 0
	\label:
	.endm

	/*
	 * Create an entry in the exception vector table, enforcing it is
	 * aligned on a 128-byte boundary, as required by the ARMv8 architecture.
	 * Use zero bytes as the fill value for the padding so that it decodes
	 * to illegal AArch64 instructions. This increases security, robustness
	 * and potentially facilitates debugging.
	 */
	.macro vector_entry  label, section_name=.vectors
	.cfi_sections .debug_frame
	.section \section_name, "ax"
	.align 7, 0
	.type \label, %function
	.cfi_startproc
	\label:
	.endm

	/*
	 * Pad the current exception vector entry out to its full size, which
	 * is always 32 instructions. If the entry contains more than 32
	 * instructions, an assembly error is emitted.
	 */
	.macro end_vector_entry label
	.cfi_endproc
	.fill	\label + (32 * 4) - .
	.endm
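
	/*
	 * Illustrative layout (a sketch with made-up names; the real vector
	 * tables live in the BL images' exception handling code):
	 *
	 *	vector_base my_exceptions
	 *
	 *	vector_entry my_sync_sp_el0
	 *		b	my_sync_handler
	 *	end_vector_entry my_sync_sp_el0
	 */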

	/*
	 * This macro calculates the base address of the current CPU's MP stack
	 * using the plat_my_core_pos() index, the name of the stack storage
	 * and the size of each stack.
	 * Out: X0 = physical address of stack base
	 * Clobber: X30, X1, X2
	 */
	.macro get_my_mp_stack _name, _size
	bl	plat_my_core_pos
	adrp	x2, (\_name + \_size)
	add	x2, x2, :lo12:(\_name + \_size)
	mov	x1, #\_size
	madd	x0, x0, x1, x2
	.endm

	/*
	 * This macro calculates the base address of a UP stack using the
	 * name of the stack storage and the size of the stack.
	 * Out: X0 = physical address of stack base
	 */
	.macro get_up_stack _name, _size
	adrp	x0, (\_name + \_size)
	add	x0, x0, :lo12:(\_name + \_size)
	.endm
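
	/*
	 * Hypothetical usage from a platform's stack helper (the symbol names
	 * below are illustrative, not defined in this file). X30 is preserved
	 * in X10 because get_my_mp_stack clobbers it via 'bl':
	 *
	 *	func plat_get_my_stack
	 *		mov	x10, x30
	 *		get_my_mp_stack platform_normal_stacks, PLATFORM_STACK_SIZE
	 *		ret	x10
	 *	endfunc plat_get_my_stack
	 */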

	/*
	 * Helper macro to generate the best mov/movk combinations according
	 * to the value to be moved. The 16 bits at offset '_shift' are tested
	 * and, if not zero, moved into '_reg' without affecting the other
	 * bits.
	 */
	.macro _mov_imm16 _reg, _val, _shift
		.if (\_val >> \_shift) & 0xffff
			.if (\_val & (1 << \_shift - 1))
				movk	\_reg, (\_val >> \_shift) & 0xffff, LSL \_shift
			.else
				mov	\_reg, \_val & (0xffff << \_shift)
			.endif
		.endif
	.endm

	/*
	 * Helper macro to load arbitrary values into 32 or 64-bit registers,
	 * generating the best mov/movk combinations. Many base addresses are
	 * 64KB aligned; in that case the macro avoids updating bits 15:0.
	 */
	.macro mov_imm _reg, _val
		.if (\_val) == 0
			mov	\_reg, #0
		.else
			_mov_imm16	\_reg, (\_val), 0
			_mov_imm16	\_reg, (\_val), 16
			_mov_imm16	\_reg, (\_val), 32
			_mov_imm16	\_reg, (\_val), 48
		.endif
	.endm
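
	/*
	 * For illustration: "mov_imm x0, 0x20000000" expands to a single
	 * "mov x0, #0x20000000", whereas "mov_imm x1, 0x12345678" expands to
	 * "mov x1, #0x5678" followed by "movk x1, #0x1234, LSL 16".
	 */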

	/*
	 * Macro to mark instances where we're jumping to a function and don't
	 * expect a return. To provide the function being jumped to with
	 * additional information, we use the 'bl' instruction to jump rather
	 * than 'b'.
	 *
	 * Debuggers infer the location of a call from where LR points to, which
	 * is usually the instruction after 'bl'. If this macro expansion
	 * happens to be the last location in a function, that'll cause the LR
	 * to point to a location beyond the function, thereby misleading the
	 * debugger's backtrace. We therefore insert a 'nop' after the function
	 * call for debug builds, unless the 'skip_nop' parameter is non-zero.
	 */
	.macro no_ret _func:req, skip_nop=0
	bl	\_func
#if DEBUG
	.ifeq \skip_nop
	nop
	.endif
#endif
	.endm
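
	/*
	 * Illustrative call (a sketch): jump to a handler that never returns,
	 * e.g. a panic path:
	 *
	 *	no_ret	plat_panic_handler
	 */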

	/*
	 * Reserve space for a spin lock in an assembly file.
	 */
	.macro define_asm_spinlock _name:req
	.align	SPINLOCK_ASM_ALIGN
	\_name:
	.space	SPINLOCK_ASM_SIZE
	.endm

#if RAS_EXTENSION
	/*
	 * Error Synchronization Barrier, emitted as a raw encoding so that
	 * assemblers without RAS support can still build this code.
	 */
	.macro esb
	.inst	0xd503221f
	.endm
#endif

	/*
	 * Helper macro to read a system register value into x0
	 */
	.macro	read reg:req
#if ENABLE_BTI
	bti	j
#endif
	mrs	x0, \reg
	ret
	.endm

	/*
	 * Helper macro to write the value in x1 to a system register
	 */
	.macro	write reg:req
#if ENABLE_BTI
	bti	j
#endif
	msr	\reg, x1
	ret
	.endm
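
	/*
	 * Hypothetical accessor built with the helpers above (the function
	 * name is illustrative); each expansion ends in 'ret', so the body
	 * is just the macro:
	 *
	 *	func read_scr_el3_example
	 *		read	scr_el3
	 *	endfunc read_scr_el3_example
	 */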

	/*
	 * Macro for mitigating against speculative execution beyond ERET. Uses the
	 * speculation barrier instruction introduced by FEAT_SB, if it's enabled.
	 */
	.macro exception_return
	eret
#if ENABLE_FEAT_SB
	sb
#else
	dsb	nsh
	isb
#endif
	.endm

#endif /* ASM_MACROS_S */