xref: /aosp_15_r20/art/runtime/arch/x86/jni_entrypoints_x86.S (revision 795d594fd825385562da6b089ea9b2033f3abf5a)
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_x86.S"

#define MANAGED_ARGS_SAVE_SIZE /*xmm0-xmm3*/ 4 * 8 + /*padding*/ 4 + /* GPR args */ 4 * 4
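// Total managed argument save area: 32 (xmm0-xmm3) + 4 (padding) + 16 (GPR args) = 52 bytes.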

// Save register args and add space for outgoing arguments.
// With `call_args_space = 0`, the ESP shall be 8-byte aligned but not 16-byte aligned,
// so either the `call_args_space` should be 8 (or 24, 40, ...) or the user of the macro
// needs to adjust the ESP explicitly afterwards.
MACRO1(SAVE_MANAGED_ARGS_INCREASE_FRAME, call_args_space)
    // Return address is on the stack.
    PUSH_ARG ebx
    PUSH_ARG edx
    PUSH_ARG ecx
    PUSH_ARG eax
    // Make xmm<n> spill slots 8-byte aligned.
    INCREASE_FRAME (\call_args_space + /*FPRs*/ 4 * 8 + /*padding*/ 4)
    movsd %xmm0, \call_args_space + 0(%esp)
    movsd %xmm1, \call_args_space + 8(%esp)
    movsd %xmm2, \call_args_space + 16(%esp)
    movsd %xmm3, \call_args_space + 24(%esp)
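    // Resulting layout (low to high): call args space | xmm0-xmm3 | padding | EAX | ECX | EDX | EBX | return PC.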
END_MACRO

MACRO1(RESTORE_MANAGED_ARGS_DECREASE_FRAME, call_args_space)
    movsd \call_args_space + 0(%esp), %xmm0
    movsd \call_args_space + 8(%esp), %xmm1
    movsd \call_args_space + 16(%esp), %xmm2
    movsd \call_args_space + 24(%esp), %xmm3
    DECREASE_FRAME \call_args_space + /*FPR args*/ 4 * 8 + /*padding*/ 4
    POP_ARG eax
    POP_ARG ecx
    POP_ARG edx
    POP_ARG ebx
END_MACRO

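// Generates a trampoline that saves the managed argument registers (EAX, ECX, EDX, EBX and
// xmm0-xmm3), calls `cxx_name(arg1)` and restores the argument registers before returning.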
MACRO3(JNI_SAVE_MANAGED_ARGS_TRAMPOLINE, name, cxx_name, arg1)
DEFINE_FUNCTION \name
    // Note: Managed callee-save registers have been saved by the JNI stub.
    // Save register args EAX, ECX, EDX, EBX, mmx0-mmx3, and add padding above `arg1`.
    SAVE_MANAGED_ARGS_INCREASE_FRAME /*padding*/ 4
    // Call `cxx_name()`.
    PUSH_ARG RAW_VAR(arg1)        // Pass arg1.
    call CALLVAR(cxx_name)        // Call cxx_name(...).
    // Restore register args EAX, ECX, EDX, EBX, mmx0-mmx3 and return.
    RESTORE_MANAGED_ARGS_DECREASE_FRAME /*arg1*/ 4 + /*padding*/ 4
    ret
END_FUNCTION \name
END_MACRO

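// Generates a trampoline that saves the return registers (EAX, EDX and xmm0), calls
// `cxx_name(arg1)` or `cxx_name(arg1, arg2)` and restores the return registers before returning.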
MACRO4(JNI_SAVE_RETURN_VALUE_TRAMPOLINE, name, cxx_name, arg1, arg2)
DEFINE_FUNCTION \name
    // Save return registers.
    PUSH_ARG edx
    PUSH_ARG eax
    .ifnc \arg2, none
        INCREASE_FRAME /*mmx0*/ 8 + /*padding*/ 4
        movsd %xmm0, 0(%esp)
        PUSH_ARG RAW_VAR(arg2)    // Pass arg2.
    .else
        INCREASE_FRAME /*padding*/ 4 + /*mmx0*/ 8 + /*padding*/ 4
        movsd %xmm0, 4(%esp)
    .endif
    // Call `cxx_name()`.
    PUSH_ARG RAW_VAR(arg1)        // Pass arg1.
    call CALLVAR(cxx_name)        // Call cxx_name(...).
    // Restore return registers and return.
    movsd 8(%esp), %xmm0
    DECREASE_FRAME /*call args*/ 8 + /*xmm0*/ 8 + /*padding*/ 4
    POP_ARG eax
    POP_ARG edx
    ret
END_FUNCTION \name
END_MACRO

    /*
     * Jni dlsym lookup stub for @CriticalNative.
     */
DEFINE_FUNCTION art_jni_dlsym_lookup_critical_stub
    // The hidden arg holding the tagged method (bit 0 set means GenericJNI) is eax.
    // For Generic JNI we already have a managed frame, so we reuse the art_jni_dlsym_lookup_stub.
    testl LITERAL(1), %eax
    jnz art_jni_dlsym_lookup_stub

    // Since the native call args are all on the stack, we can use the managed args
    // registers as scratch registers. So, EBX, EDX and ECX are available.

    // Load caller PC.
    movl (%esp), %ecx

    // Save the caller method from the hidden arg.
    PUSH_ARG eax

    // Call artCriticalNativeFrameSize(method, caller_pc).
    PUSH_ARG ecx                  // Pass caller PC.
    PUSH_ARG eax                  // Pass method.
    call SYMBOL(artCriticalNativeFrameSize)  // (method, caller_pc)
    DECREASE_FRAME 8              // Remove args.
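    // EAX now holds the size of the native call's stack args that need to be relocated (may be 0).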

    // Restore method register to EBX.
    POP_ARG ebx

    // Load caller PC to EDX and redefine return PC for CFI.
    movl (%esp), %edx
    CFI_REGISTER(%eip, %edx)

    // Reserve space for a SaveRefsAndArgs managed frame, either for the actual runtime
    // method or for a GenericJNI frame which is similar but has a native method and a tag.
    INCREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS - __SIZEOF_POINTER__

    // Calculate the number of DWORDs to move.
    movl %eax, %ecx
    shrl LITERAL(2), %ecx
    jecxz .Lcritical_skip_copy_args
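    // (jecxz: skip the copy when there are no stack args to move.)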

    // Save EDI, ESI so that we can use them for moving stack args.
    PUSH edi
    PUSH esi

    // Move the stack args.
    leal 2 * __SIZEOF_POINTER__(%esp), %edi
    leal FRAME_SIZE_SAVE_REFS_AND_ARGS(%edi), %esi
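    // EDI := destination just above the saved EDI/ESI; ESI := the args' current
    // location, FRAME_SIZE_SAVE_REFS_AND_ARGS bytes higher.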
    rep movsd

    // Restore EDI, ESI.
    POP esi
    POP edi

.Lcritical_skip_copy_args:
    // Calculate the base address of the managed frame.
    leal (%esp, %eax, 1), %eax
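    // The relocated stack args (if any) occupy EAX bytes at the bottom of the stack,
    // so the managed frame starts at ESP + EAX.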

    leal 1(%eax), %ecx            // Prepare managed SP tagged for a GenericJNI frame.
    testl LITERAL(ACCESS_FLAGS_METHOD_IS_NATIVE), ART_METHOD_ACCESS_FLAGS_OFFSET(%ebx)
    jnz .Lcritical_skip_prepare_runtime_method

    // Save the return PC for managed stack walk.
    // (When coming from a compiled stub, the correct return PC is already there.)
    movl %edx, FRAME_SIZE_SAVE_REFS_AND_ARGS - __SIZEOF_POINTER__(%eax)

    // Replace the target method with the SaveRefsAndArgs runtime method.
    LOAD_RUNTIME_INSTANCE ebx
    movl RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET(%ebx), %ebx

    movl %eax, %ecx               // Prepare untagged managed SP for the runtime method.

.Lcritical_skip_prepare_runtime_method:
    // Store the method on the bottom of the managed frame.
    movl %ebx, (%eax)

    // Move the managed frame address to native callee-save register EBX.
    movl %eax, %ebx

    // Spill registers for the SaveRefsAndArgs frame above the stack args.
    movl %edi, 56(%ebx)
    CFI_EXPRESSION_BREG CFI_REG(edi), CFI_REG(ebx), 56
    movl %esi, 52(%ebx)
    CFI_EXPRESSION_BREG CFI_REG(esi), CFI_REG(ebx), 52
    movl %ebp, 48(%ebx)
    CFI_EXPRESSION_BREG CFI_REG(ebp), CFI_REG(ebx), 48
    // Skip managed ABI args EBX, EDX, ECX and FPRs. The runtime shall not examine the
    // args in the managed frame. (We have already clobbered EBX, EDX, ECX anyway.)

    // Place (maybe tagged) managed SP in Thread::Current()->top_quick_frame.
    movl %ecx, %fs:THREAD_TOP_QUICK_FRAME_OFFSET

    // Save our return PC in a slot reserved for first FP arg in managed ABI.
    movl %edx, __SIZEOF_POINTER__(%ebx)
    CFI_EXPRESSION_BREG CFI_REG(eip), CFI_REG(ebx), __SIZEOF_POINTER__

    // Call artFindNativeMethodRunnable()
    INCREASE_FRAME 12             // Align stack.
    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    call SYMBOL(artFindNativeMethodRunnable)  // (Thread*)
    addl LITERAL(16), %esp
    CFI_ADJUST_CFA_OFFSET(-16)
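    // EAX := native method code pointer, or null if an exception is pending.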

    // Check for exception.
    test %eax, %eax
    CFI_REMEMBER_STATE
    jz .Lcritical_deliver_exception

    // Remember our return PC in EDX.
    movl __SIZEOF_POINTER__(%ebx), %edx
    CFI_REGISTER(%eip, %edx)

    // Restore callee-save registers from the frame. We shall not need the method anymore.
    movl 48(%ebx), %ebp
    CFI_RESTORE(%ebp)
    movl 52(%ebx), %esi
    CFI_RESTORE(%esi)
    movl 56(%ebx), %edi
    CFI_RESTORE(%edi)

    // Calculate the number of DWORDs to move.
    movl %ebx, %ecx
    subl %esp, %ecx
    shrl LITERAL(2), %ecx
    jecxz .Lcritical_skip_copy_args_back

    // Save EDI, ESI so that we can use them for moving stack args.
    PUSH edi
    PUSH esi

    // Move stack args to their original place.
    leal -__SIZEOF_POINTER__(%ebx), %esi
    leal FRAME_SIZE_SAVE_REFS_AND_ARGS - __SIZEOF_POINTER__(%ebx), %edi
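    // The source and destination regions overlap and the destination is higher,
    // so copy backwards, starting from the last DWORD.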
    std
    rep movsd
    cld

    // Restore EDI, ESI.
    POP esi
    POP edi

.Lcritical_skip_copy_args_back:
    // Remove the frame reservation.
    DECREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS - __SIZEOF_POINTER__

    // Store our return PC.
    movl %edx, (%esp)
    CFI_REL_OFFSET(%eip, 0)

    // Do the tail call.
    jmp *%eax

.Lcritical_deliver_exception:
    CFI_RESTORE_STATE_AND_DEF_CFA %esp, FRAME_SIZE_SAVE_REFS_AND_ARGS
    DELIVER_PENDING_EXCEPTION_FRAME_READY
END_FUNCTION art_jni_dlsym_lookup_critical_stub

    /*
     * Read barrier for the method's declaring class needed by JNI stub for static methods.
     * (We're using a pointer to the declaring class in `ArtMethod` as `jclass`.)
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_read_barrier, artJniReadBarrier, eax

    /*
     * Trampoline to `artJniMethodStart()` that preserves all managed arguments.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_method_start, artJniMethodStart, fs:THREAD_SELF_OFFSET

    /*
     * Trampoline to `artJniMethodEntryHook` that preserves all managed arguments.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE \
    art_jni_method_entry_hook, artJniMethodEntryHook, fs:THREAD_SELF_OFFSET

    /*
     * Trampoline to `artJniMonitoredMethodStart()` that preserves all managed arguments.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE \
    art_jni_monitored_method_start, artJniMonitoredMethodStart, fs:THREAD_SELF_OFFSET

    /*
     * Trampoline to `artJniMethodEnd()` that preserves all return registers.
     */
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_method_end, artJniMethodEnd, fs:THREAD_SELF_OFFSET, none

    /*
     * Trampoline to `artJniMonitoredMethodEnd()` that preserves all return registers.
     */
JNI_SAVE_RETURN_VALUE_TRAMPOLINE \
    art_jni_monitored_method_end, artJniMonitoredMethodEnd, fs:THREAD_SELF_OFFSET, none

    /*
     * Entry from JNI stub that tries to lock the object in a fast path and
     * calls `artLockObjectFromCode()` (the same as for managed code) for the
     * difficult cases, may block for GC.
     * Custom calling convention:
     *     EBP holds the non-null object to lock.
     *     Callee-save registers have been saved and can be used as temporaries (except EBP).
     *     All argument registers need to be preserved.
     */
DEFINE_FUNCTION art_jni_lock_object
    movl %eax, %edi                       // Preserve EAX in a callee-save register.
    LOCK_OBJECT_FAST_PATH ebp, esi, /*saved_eax*/ edi, .Llock_object_jni_slow

.Llock_object_jni_slow:
    movl %edi, %eax                       // Restore EAX.
    jmp  SYMBOL(art_jni_lock_object_no_inline)
END_FUNCTION art_jni_lock_object

    /*
     * Entry from JNI stub that calls `artLockObjectFromCode()`
     * (the same as for managed code), may block for GC.
     * Custom calling convention:
     *     EBP holds the non-null object to lock.
     *     Callee-save registers have been saved and can be used as temporaries (except EBP).
     *     All argument registers need to be preserved.
     */
DEFINE_FUNCTION art_jni_lock_object_no_inline
    // This is also the slow path for art_jni_lock_object.
    // Save register args EAX, ECX, EDX, EBX, mmx0-mmx3.
    SAVE_MANAGED_ARGS_INCREASE_FRAME /*call_args_space*/ 0
    // Note: The stack is not 16-byte aligned here but it shall be after pushing args for the call.
    // Call `artLockObjectFromCode()`
    pushl %fs:THREAD_SELF_OFFSET          // Pass Thread::Current().
    CFI_ADJUST_CFA_OFFSET(4)
    PUSH_ARG ebp                          // Pass the object to lock.
    call SYMBOL(artLockObjectFromCode)    // (object, Thread*)
    // Check result; a non-zero value means an exception is pending.
    testl %eax, %eax
    jnz   1f
    // Restore register args EAX, ECX, EDX, EBX, mmx0-mmx3 and return.
    RESTORE_MANAGED_ARGS_DECREASE_FRAME /*call_args_space*/ 8
    ret
    .cfi_adjust_cfa_offset (/*call args*/ 8 + MANAGED_ARGS_SAVE_SIZE)
1:
    // All args are irrelevant when throwing an exception.
    // Remove the spill area except for new padding to align stack.
    DECREASE_FRAME (/*call args*/ 8 + MANAGED_ARGS_SAVE_SIZE - /*new padding*/ 8)
    // Rely on the JNI transition frame constructed in the JNI stub.
    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    call SYMBOL(artDeliverPendingExceptionFromCode)  // (Thread*)
    call SYMBOL(art_quick_do_long_jump)
    UNREACHABLE
END_FUNCTION art_jni_lock_object_no_inline

    /*
     * Entry from JNI stub that tries to unlock the object in a fast path and calls
     * `artJniUnlockObject()` for the difficult cases. Note that failure to unlock
     * is fatal, so we do not need to check for exceptions in the slow path.
     * Custom calling convention:
     *     EBP holds the non-null object to unlock.
     *     Callee-save registers have been saved and can be used as temporaries (except EBP).
     *     Return registers EAX, EDX and mmx0 need to be preserved.
     */
DEFINE_FUNCTION art_jni_unlock_object
    movl %eax, %edi                       // Preserve EAX in a different register.
    UNLOCK_OBJECT_FAST_PATH ebp, esi, /*saved_eax*/ edi, .Lunlock_object_jni_slow

.Lunlock_object_jni_slow:
    movl %edi, %eax                       // Restore EAX.
    jmp  SYMBOL(art_jni_unlock_object_no_inline)
END_FUNCTION art_jni_unlock_object

    /*
     * Entry from JNI stub that calls `artJniUnlockObject()`. Note that failure to
     * unlock is fatal, so we do not need to check for exceptions.
     * Custom calling convention:
     *     EBP holds the non-null object to unlock.
     *     Callee-save registers have been saved and can be used as temporaries (except EBP).
     *     Return registers EAX, EDX and mmx0 need to be preserved.
     */
    // This is also the slow path for art_jni_unlock_object.
JNI_SAVE_RETURN_VALUE_TRAMPOLINE \
    art_jni_unlock_object_no_inline, artJniUnlockObject, ebp, fs:THREAD_SELF_OFFSET