xref: /aosp_15_r20/art/runtime/arch/arm64/jni_entrypoints_arm64.S (revision 795d594fd825385562da6b089ea9b2033f3abf5a)
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm64.S"

// Emits a trampoline `\name` that calls `\cxx_name` while preserving all
// managed argument registers (x0-x7, d0-d7) and LR. If `\arg1` is given,
// it is moved to x0 as the single argument for the call; otherwise the
// callee is invoked with whatever is already in x0.
// The frame layout is: [args (ALL_ARGS_SIZE)] [padding 8] [LR 8], keeping
// SP 16-byte aligned as required by AAPCS64.
.macro JNI_SAVE_MANAGED_ARGS_TRAMPOLINE name, cxx_name, arg1 = "none"
    .extern \cxx_name
ENTRY \name
    // Save args and LR.
    SAVE_ALL_ARGS_INCREASE_FRAME /*padding*/ 8 + /*LR*/ 8
    str    lr, [sp, #(ALL_ARGS_SIZE + /*padding*/ 8)]
    .cfi_rel_offset lr, ALL_ARGS_SIZE + /*padding*/ 8
    // Call `cxx_name()`.
    .ifnc \arg1, none
        mov x0, \arg1                          // Pass arg1.
    .endif
    bl     \cxx_name                           // Call cxx_name(...).
    // Restore LR and args and return.
    ldr    lr, [sp, #(ALL_ARGS_SIZE + /*padding*/ 8)]
    .cfi_restore lr
    RESTORE_ALL_ARGS_DECREASE_FRAME /*padding*/ 8 + /*LR*/ 8
    ret
END \name
.endm
38
// Emits a trampoline `\name` that calls `\cxx_name` while preserving the
// managed return registers (x0 and d0) and LR. `\arg1` is passed in x0 and
// the optional `\arg2` in x1.
// Frame layout (32 bytes, 16-byte aligned): [x0, lr] at sp, d0 at sp+16.
.macro JNI_SAVE_RETURN_VALUE_TRAMPOLINE name, cxx_name, arg1, arg2 = "none"
    .extern \cxx_name
ENTRY \name
    // Save return registers and return address.
    stp    x0, lr, [sp, #-32]!
    .cfi_adjust_cfa_offset 32
    .cfi_rel_offset lr, 8
    str    d0, [sp, #16]
    // Call `cxx_name()`.
    mov    x0, \arg1                           // Pass arg1.
    .ifnc \arg2, none
        mov x1, \arg2                          // Pass arg2.
    .endif
    bl     \cxx_name                           // Call cxx_name(...).
    // Restore return registers and return.
    ldr    d0, [sp, #16]
    ldp    x0, lr, [sp], #32
    .cfi_adjust_cfa_offset -32
    .cfi_restore lr
    ret
END \name
.endm
61
62    /*
63     * Jni dlsym lookup stub for @CriticalNative.
64     */
65ENTRY art_jni_dlsym_lookup_critical_stub
66    // The hidden arg holding the tagged method (bit 0 set means GenericJNI) is x15.
67    // For Generic JNI we already have a managed frame, so we reuse the art_jni_dlsym_lookup_stub.
68    // Note: 'tbnz' doesn't always have enough range (+/-32KB) to reach art_jni_dlsym_lookup_stub
69    // so 'b' (+/-128MB) is used instead.
70    tbz  x15, #0, .Lcritical_not_generic_jni
71    b art_jni_dlsym_lookup_stub
72
73.Lcritical_not_generic_jni:
74    // Save args, the hidden arg and caller PC. No CFI needed for args and the hidden arg.
75    SAVE_ALL_ARGS_INCREASE_FRAME 2 * 8
76    stp   x15, lr, [sp, #ALL_ARGS_SIZE]
77    .cfi_rel_offset lr, ALL_ARGS_SIZE + 8
78
79    // Call artCriticalNativeFrameSize(method, caller_pc)
80    mov   x0, x15  // x0 := method (from hidden arg)
81    mov   x1, lr   // x1 := caller_pc
82    bl    artCriticalNativeFrameSize
83
84    // Move frame size to x14.
85    mov   x14, x0
86
87    // Restore args, the hidden arg and caller PC.
88    ldp   x15, lr, [sp, #128]
89    .cfi_restore lr
90    RESTORE_ALL_ARGS_DECREASE_FRAME 2 * 8
91
92    // Reserve space for a SaveRefsAndArgs managed frame, either for the actual runtime
93    // method or for a GenericJNI frame which is similar but has a native method and a tag.
94    INCREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS
95
96    // Calculate the base address of the managed frame.
97    add   x13, sp, x14
98
99    // Prepare the return address for managed stack walk of the SaveRefsAndArgs frame.
100    // If we're coming from JNI stub with tail call, it is LR. If we're coming from
101    // JNI stub that saved the return address, it will be the last value we copy below.
102    // If we're coming directly from compiled code, it is LR, set further down.
103    mov   xIP1, lr
104
105    // Move the stack args if any.
106    cbz   x14, .Lcritical_skip_copy_args
107    mov   x12, sp
108.Lcritical_copy_args_loop:
109    ldp   xIP0, xIP1, [x12, #FRAME_SIZE_SAVE_REFS_AND_ARGS]
110    subs  x14, x14, #16
111    stp   xIP0, xIP1, [x12], #16
112    bne   .Lcritical_copy_args_loop
113.Lcritical_skip_copy_args:
114
115    // Spill registers for the SaveRefsAndArgs frame above the stack args.
116    // Note that the runtime shall not examine the args here, otherwise we would have to
117    // move them in registers and stack to account for the difference between managed and
118    // native ABIs. Do not update CFI while we hold the frame address in x13 and the values
119    // in registers are unchanged.
120    stp   d0, d1, [x13, #16]
121    stp   d2, d3, [x13, #32]
122    stp   d4, d5, [x13, #48]
123    stp   d6, d7, [x13, #64]
124    stp   x1, x2, [x13, #80]
125    stp   x3, x4, [x13, #96]
126    stp   x5, x6, [x13, #112]
127    stp   x7, x20, [x13, #128]
128    stp   x21, x22, [x13, #144]
129    stp   x23, x24, [x13, #160]
130    stp   x25, x26, [x13, #176]
131    stp   x27, x28, [x13, #192]
132    stp   x29, xIP1, [x13, #208]  // xIP1: Save return address for tail call from JNI stub.
133    // (If there were any stack args, we're storing the value that's already there.
134    // For direct calls from compiled managed code, we shall overwrite this below.)
135
136    // Move the managed frame address to native callee-save register x29 and update CFI.
137    mov   x29, x13
138    // Skip args d0-d7, x1-x7
139    CFI_EXPRESSION_BREG 20, 29, 136
140    CFI_EXPRESSION_BREG 21, 29, 144
141    CFI_EXPRESSION_BREG 22, 29, 152
142    CFI_EXPRESSION_BREG 23, 29, 160
143    CFI_EXPRESSION_BREG 24, 29, 168
144    CFI_EXPRESSION_BREG 25, 29, 176
145    CFI_EXPRESSION_BREG 26, 29, 184
146    CFI_EXPRESSION_BREG 27, 29, 192
147    CFI_EXPRESSION_BREG 28, 29, 200
148    CFI_EXPRESSION_BREG 29, 29, 208
149    // The saved return PC for managed stack walk is not necessarily our LR.
150
151    // Save our return PC in the padding.
152    str   lr, [x29, #__SIZEOF_POINTER__]
153    CFI_EXPRESSION_BREG 30, 29, __SIZEOF_POINTER__
154
155    ldr   wIP0, [x15, #ART_METHOD_ACCESS_FLAGS_OFFSET]  // Load access flags.
156    add   x14, x29, #1            // Prepare managed SP tagged for a GenericJNI frame.
157    tbnz  wIP0, #ACCESS_FLAGS_METHOD_IS_NATIVE_BIT, .Lcritical_skip_prepare_runtime_method
158
159    // When coming from a compiled method, the return PC for managed stack walk is LR.
160    // (When coming from a compiled stub, the correct return PC is already stored above.)
161    str   lr, [x29, #(FRAME_SIZE_SAVE_REFS_AND_ARGS - __SIZEOF_POINTER__)]
162
163    // Replace the target method with the SaveRefsAndArgs runtime method.
164    LOAD_RUNTIME_INSTANCE x15
165    ldr   x15, [x15, #RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET]
166
167    mov   x14, x29                // Prepare untagged managed SP for the runtime method.
168
169.Lcritical_skip_prepare_runtime_method:
170    // Store the method on the bottom of the managed frame.
171    str   x15, [x29]
172
173    // Place (maybe tagged) managed SP in Thread::Current()->top_quick_frame.
174    str   x14, [xSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]
175
176    // Preserve the native arg register x0 in callee-save register x28 which was saved above.
177    mov   x28, x0
178
179    // Call artFindNativeMethodRunnable()
180    mov   x0, xSELF   // pass Thread::Current()
181    bl    artFindNativeMethodRunnable
182
183    // Store result in scratch reg.
184    mov   x13, x0
185
186    // Restore the native arg register x0.
187    mov   x0, x28
188
189    // Restore our return PC.
190    RESTORE_REG_BASE x29, lr, __SIZEOF_POINTER__
191
192    // Remember the stack args size, negated because SP cannot be on the right-hand side in SUB.
193    sub   x14, sp, x29
194
195    // Restore the frame. We shall not need the method anymore.
196    ldp   d0, d1, [x29, #16]
197    ldp   d2, d3, [x29, #32]
198    ldp   d4, d5, [x29, #48]
199    ldp   d6, d7, [x29, #64]
200    ldp   x1, x2, [x29, #80]
201    ldp   x3, x4, [x29, #96]
202    ldp   x5, x6, [x29, #112]
203    ldp   x7, x20, [x29, #128]
204    .cfi_restore x20
205    RESTORE_TWO_REGS_BASE x29, x21, x22, 144
206    RESTORE_TWO_REGS_BASE x29, x23, x24, 160
207    RESTORE_TWO_REGS_BASE x29, x25, x26, 176
208    RESTORE_TWO_REGS_BASE x29, x27, x28, 192
209    RESTORE_REG_BASE x29, x29, 208
210
211    REFRESH_MARKING_REGISTER
212
213    // Check for exception before moving args back to keep the return PC for managed stack walk.
214    CFI_REMEMBER_STATE
215    cbz   x13, .Lcritical_deliver_exception
216
217    // Move stack args to their original place.
218    cbz   x14, .Lcritical_skip_copy_args_back
219    sub   x12, sp, x14
220.Lcritical_copy_args_back_loop:
221    ldp   xIP0, xIP1, [x12, #-16]!
222    adds  x14, x14, #16
223    stp   xIP0, xIP1, [x12, #FRAME_SIZE_SAVE_REFS_AND_ARGS]
224    bne   .Lcritical_copy_args_back_loop
225.Lcritical_skip_copy_args_back:
226
227    // Remove the frame reservation.
228    DECREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS
229
230    // Do the tail call.
231    br    x13
232
233.Lcritical_deliver_exception:
234    CFI_RESTORE_STATE_AND_DEF_CFA sp, FRAME_SIZE_SAVE_REFS_AND_ARGS
235    // The exception delivery checks that xSELF was saved but the SaveRefsAndArgs
236    // frame does not save it, so we cannot use the existing SaveRefsAndArgs frame.
237    // That's why we checked for exception after restoring registers from it.
238    // We need to build a SaveAllCalleeSaves frame instead. Args are irrelevant at this
239    // point but keep the area allocated for stack args to keep CFA definition simple.
240    DECREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS - FRAME_SIZE_SAVE_ALL_CALLEE_SAVES
241
242    // Calculate the base address of the managed frame.
243    sub   x13, sp, x14
244
245    // Spill registers for the SaveAllCalleeSaves frame above the stack args area. Do not update
246    // CFI while we hold the frame address in x13 and the values in registers are unchanged.
247    stp   d8, d9, [x13, #16]
248    stp   d10, d11, [x13, #32]
249    stp   d12, d13, [x13, #48]
250    stp   d14, d15, [x13, #64]
251    stp   x19, x20, [x13, #80]
252    stp   x21, x22, [x13, #96]
253    stp   x23, x24, [x13, #112]
254    stp   x25, x26, [x13, #128]
255    stp   x27, x28, [x13, #144]
256    str   x29, [x13, #160]
257    // Keep the caller PC for managed stack walk.
258
259    // Move the managed frame address to native callee-save register x29 and update CFI.
260    mov   x29, x13
261    CFI_EXPRESSION_BREG 19, 29, 80
262    CFI_EXPRESSION_BREG 20, 29, 88
263    CFI_EXPRESSION_BREG 21, 29, 96
264    CFI_EXPRESSION_BREG 22, 29, 104
265    CFI_EXPRESSION_BREG 23, 29, 112
266    CFI_EXPRESSION_BREG 24, 29, 120
267    CFI_EXPRESSION_BREG 25, 29, 128
268    CFI_EXPRESSION_BREG 26, 29, 136
269    CFI_EXPRESSION_BREG 27, 29, 144
270    CFI_EXPRESSION_BREG 28, 29, 152
271    CFI_EXPRESSION_BREG 29, 29, 160
272    // The saved return PC for managed stack walk is not necessarily our LR.
273
274    // Save our return PC in the padding.
275    str   lr, [x29, #__SIZEOF_POINTER__]
276    CFI_EXPRESSION_BREG 30, 29, __SIZEOF_POINTER__
277
278    // Store ArtMethod* Runtime::callee_save_methods_[kSaveAllCalleeSaves] to the managed frame.
279    LOAD_RUNTIME_INSTANCE xIP0
280    ldr   xIP0, [xIP0, #RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET]
281    str   xIP0, [x29]
282
283    // Place the managed frame SP in Thread::Current()->top_quick_frame.
284    str   x29, [xSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]
285
286    DELIVER_PENDING_EXCEPTION_FRAME_READY
287END art_jni_dlsym_lookup_critical_stub
288
    /*
     * Read barrier for the method's declaring class needed by JNI stub for static methods.
     * (We're using a pointer to the declaring class in `ArtMethod` as `jclass`.)
     */
// The method argument is already in x0 for call to `artJniReadBarrier(ArtMethod*)`.
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_read_barrier, artJniReadBarrier

    /*
     * Trampoline to `artJniMethodStart()` that preserves all managed arguments.
     * Passes Thread::Current() (xSELF) as the single argument.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_method_start, artJniMethodStart, xSELF

    /*
     * Trampoline to `artJniMethodEntryHook` that preserves all managed arguments.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_method_entry_hook, artJniMethodEntryHook, xSELF

    /*
     * Trampoline to `artJniMonitoredMethodStart()` that preserves all managed arguments.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_monitored_method_start, artJniMonitoredMethodStart, xSELF

    /*
     * Trampoline to `artJniMethodEnd()` that preserves all return registers.
     */
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_method_end, artJniMethodEnd, xSELF

    /*
     * Trampoline to `artJniMonitoredMethodEnd()` that preserves all return registers.
     */
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_monitored_method_end, artJniMonitoredMethodEnd, xSELF
320
    /*
     * Entry from JNI stub that tries to lock the object in a fast path and
     * calls `artLockObjectFromCode()` (the same as for managed code) for the
     * difficult cases, may block for GC.
     * Custom calling convention:
     *     x15 holds the non-null object to lock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     All argument registers need to be preserved.
     */
ENTRY art_jni_lock_object
    // Inline fast path; on failure branches to the out-of-line slow path
    // `art_jni_lock_object_no_inline` defined below.
    LOCK_OBJECT_FAST_PATH x15, art_jni_lock_object_no_inline, /*can_be_null*/ 0
END art_jni_lock_object
333
    /*
     * Entry from JNI stub that calls `artLockObjectFromCode()`
     * (the same as for managed code), may block for GC.
     * Custom calling convention:
     *     x15 holds the non-null object to lock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     All argument registers need to be preserved.
     */
    .extern artLockObjectFromCode
ENTRY art_jni_lock_object_no_inline
    // This is also the slow path for art_jni_lock_object.
    // Save args and LR.
    SAVE_ALL_ARGS_INCREASE_FRAME /*padding*/ 8 + /*LR*/ 8
    str    lr, [sp, #(ALL_ARGS_SIZE + /*padding*/ 8)]
    .cfi_rel_offset lr, ALL_ARGS_SIZE + /*padding*/ 8
    // Call `artLockObjectFromCode()`.
    mov    x0, x15                    // Pass the object to lock.
    mov    x1, xSELF                  // Pass Thread::Current().
    bl     artLockObjectFromCode      // (Object* obj, Thread*)
    // Restore return address.
    ldr    lr, [sp, #(ALL_ARGS_SIZE + /*padding*/ 8)]
    .cfi_restore lr
    // Check result: non-zero means a pending exception was raised.
    cbnz   x0, 1f
    // Restore register args x0-x7, d0-d7 and return.
    RESTORE_ALL_ARGS_DECREASE_FRAME /*padding*/ 8 + /*LR*/ 8
    ret
    // Re-declare the CFA for the code after `ret`: the spill area is still
    // allocated on the exception path below.
    .cfi_adjust_cfa_offset (ALL_ARGS_SIZE + /*padding*/ 8 + /*LR*/ 8)
1:
    // All args are irrelevant when throwing an exception. Remove the spill area.
    DECREASE_FRAME (ALL_ARGS_SIZE + /*padding*/ 8 + /*LR*/ 8)
    // Make a call to `artDeliverPendingExceptionFromCode()`.
    // Rely on the JNI transition frame constructed in the JNI stub.
    mov    x0, xSELF                            // Pass Thread::Current().
    bl     artDeliverPendingExceptionFromCode   // (Thread*)
    bl     art_quick_do_long_jump               // (Context*)
    brk 0  // Unreached
END art_jni_lock_object_no_inline
372
    /*
     * Entry from JNI stub that tries to unlock the object in a fast path and calls
     * `artJniUnlockObject()` for the difficult cases. Note that failure to unlock
     * is fatal, so we do not need to check for exceptions in the slow path.
     * Custom calling convention:
     *     x15 holds the non-null object to unlock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     Return registers r0 and d0 need to be preserved.
     */
ENTRY art_jni_unlock_object
    // Inline fast path; on failure branches to the out-of-line slow path
    // `art_jni_unlock_object_no_inline` defined below.
    UNLOCK_OBJECT_FAST_PATH x15, art_jni_unlock_object_no_inline, /*can_be_null*/ 0
END art_jni_unlock_object
385
    /*
     * Entry from JNI stub that calls `artJniUnlockObject()`. Note that failure to
     * unlock is fatal, so we do not need to check for exceptions.
     * Custom calling convention:
     *     x15 holds the non-null object to unlock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     Return registers r0 and d0 need to be preserved.
     */
    // This is also the slow path for art_jni_unlock_object.
    // Calls artJniUnlockObject(obj, Thread*) while preserving x0/d0.
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_unlock_object_no_inline, artJniUnlockObject, x15, xSELF
396