// xref: /aosp_15_r20/art/runtime/arch/arm64/native_entrypoints_arm64.S (revision 795d594fd825385562da6b089ea9b2033f3abf5a)
/*
 * Copyright (C) 2024 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm64.S"
#include "interpreter/cfi_asm_support.h"

/*
 * This file contains all native entrypoints that are called using the native ABI and do not
 * transition to the quick ABI. For example: the switch interpreter (using the native ABI) directly
 * calls ExecuteSwitchImplAsm and this code will always return back to the switch interpreter,
 * again using the native ABI. Because of this behaviour ExecuteSwitchImplAsm should be included in
 * this file. This is done so these native entrypoints can be compiled independently of the quick
 * entrypoints for cases when the kRuntimeISA and kRuntimeQuickCodeISA do not match.
 *
 * See comment on StackType (thread.h) for definitions and examples of quick ABI/code and
 * native ABI/code.
 */
31
// Wrap ExecuteSwitchImpl in assembly method which specifies DEX PC for unwinding.
//  Argument 0: x0: The context pointer for ExecuteSwitchImpl.
//  Argument 1: x1: Pointer to the templated ExecuteSwitchImpl to call.
//  Argument 2: x2: The value of DEX PC (memory address of the method's bytecode).
ENTRY ExecuteSwitchImplAsm
    // Spill x19 (callee-saved, will hold the DEX PC) and LR; grows the frame by 16 bytes
    // and emits the matching CFI so the unwinder can restore both.
    SAVE_TWO_REGS_INCREASE_FRAME x19, xLR, 16
    mov x19, x2                                   // x19 = DEX PC
    // Record in the CFI that the DEX PC for this frame lives in x19 (context pointer in x0),
    // so stack walks through the interpreter can report the correct bytecode location.
    CFI_DEFINE_DEX_PC_WITH_OFFSET(0 /* x0 */, 19 /* x19 */, 0)
    blr x1                                        // Call the wrapped method.
    // Reload x19/LR and pop the 16-byte frame; undoes the CFI from the save above.
    RESTORE_TWO_REGS_DECREASE_FRAME x19, xLR, 16
    ret
END ExecuteSwitchImplAsm
    /*
     * JNI dlsym lookup stub.
     */
    .extern artFindNativeMethod
    .extern artFindNativeMethodRunnable
ENTRY art_jni_dlsym_lookup_stub
    // Spill all argument registers (they must be live across the runtime call so the
    // resolved native method receives the caller's original arguments), then x29/x30.
    SAVE_ALL_ARGS_INCREASE_FRAME 2 * 8
    stp   x29, x30, [sp, ALL_ARGS_SIZE]
    .cfi_rel_offset x29, ALL_ARGS_SIZE
    .cfi_rel_offset x30, ALL_ARGS_SIZE + 8
    add   x29, sp, ALL_ARGS_SIZE    // Set up the frame pointer above the argument spill area.

    mov x0, xSELF   // pass Thread::Current()
    // Call artFindNativeMethod() for normal native and artFindNativeMethodRunnable()
    // for @FastNative or @CriticalNative. Decide by reading the access flags of the
    // ArtMethod at the top of the managed stack. xIP0/xIP1 (x16/x17) are scratch here.
    ldr   xIP0, [x0, #THREAD_TOP_QUICK_FRAME_OFFSET]      // uintptr_t tagged_quick_frame
    bic   xIP0, xIP0, #TAGGED_JNI_SP_MASK                 // ArtMethod** sp (tag bits cleared)
    ldr   xIP0, [xIP0]                                    // ArtMethod* method
    ldr   xIP0, [xIP0, #ART_METHOD_ACCESS_FLAGS_OFFSET]   // uint32_t access_flags
    mov   xIP1, #(ACCESS_FLAGS_METHOD_IS_FAST_NATIVE | ACCESS_FLAGS_METHOD_IS_CRITICAL_NATIVE)
    tst   xIP0, xIP1                                      // Either flag set?
    b.ne  .Llookup_stub_fast_or_critical_native
    bl    artFindNativeMethod
    b     .Llookup_stub_continue
    .Llookup_stub_fast_or_critical_native:
    bl    artFindNativeMethodRunnable
.Llookup_stub_continue:
    mov   x17, x0    // store result in scratch reg.

    // load spill regs.
    ldp   x29, x30, [sp, #ALL_ARGS_SIZE]
    .cfi_restore x29
    .cfi_restore x30
    RESTORE_ALL_ARGS_DECREASE_FRAME 2 * 8

    cbz   x17, 1f   // is method code null ?
    br    x17       // if non-null, tail call to method's code.

1:
    ret             // restore regs and return to caller to handle exception.
END art_jni_dlsym_lookup_stub