/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_riscv64.S"

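// Macro for defining a trampoline that saves all managed argument registers (and RA),
// optionally moves one argument to A0, calls `cxx_name` and restores the arguments.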
.macro JNI_SAVE_MANAGED_ARGS_TRAMPOLINE name, cxx_name, arg1 = "none"
    .extern \cxx_name
ENTRY \name
    // Save args and RA.
    SAVE_ALL_ARGS_INCREASE_FRAME /*padding*/ 8 + /*RA*/ 8
    SAVE_GPR ra, (ALL_ARGS_SIZE + /*padding*/ 8)
    // Call `cxx_name()`.
    .ifnc \arg1, none
        mv    a0, \arg1
    .endif
    call   \cxx_name
    // Restore RA and args and return.
    RESTORE_GPR ra, (ALL_ARGS_SIZE + /*padding*/ 8)
    RESTORE_ALL_ARGS_DECREASE_FRAME /*padding*/ 8 + /*RA*/ 8
    ret
END \name
.endm

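// Macro for defining a trampoline that saves the return value registers (A0 and FA0)
// and RA, passes one or two arguments to `cxx_name` and restores the return values.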
.macro JNI_SAVE_RETURN_VALUE_TRAMPOLINE name, cxx_name, arg1, arg2 = "none"
    .extern \cxx_name
ENTRY \name
    // Save return registers and return address.
    INCREASE_FRAME 32
    sd    a0, 0(sp)
    fsd   fa0, 8(sp)
    SAVE_GPR ra, 24
    // Call `cxx_name()`.
    mv    a0, \arg1
    .ifnc \arg2, none
        mv    a1, \arg2
    .endif
    call  \cxx_name
    // Restore result registers and return.
    ld    a0, 0(sp)
    fld   fa0, 8(sp)
    RESTORE_GPR ra, 24
    DECREASE_FRAME 32
    ret
END \name
.endm

// JNI dlsym lookup stub for @CriticalNative.
ENTRY art_jni_dlsym_lookup_critical_stub
    // The hidden arg holding the tagged method is t0 (loaded by compiled JNI stub, compiled
    // managed code, or `art_quick_generic_jni_trampoline`). Bit 0 set means generic JNI.
    // For generic JNI we already have a managed frame, so we reuse `art_jni_dlsym_lookup_stub`.
    // Note: 'bnez' doesn't always have enough range (+/-4KB) to reach art_jni_dlsym_lookup_stub,
    // so 'tail' is used instead.
    andi  t6, t0, 1
    beqz  t6, .Lcritical_not_generic_jni
    tail  art_jni_dlsym_lookup_stub

.Lcritical_not_generic_jni:
    // Save args, the hidden arg and caller PC. No CFI needed for args and the hidden arg.
    SAVE_ALL_ARGS_INCREASE_FRAME 2*8
    SAVE_GPR t0, (ALL_ARGS_SIZE + 0)
    SAVE_GPR ra, (ALL_ARGS_SIZE + 8)

    // Call `artCriticalNativeFrameSize(method, caller_pc)`.
    mv    a0, t0  // a0 := method (from hidden arg)
    mv    a1, ra  // a1 := caller_pc
    call  artCriticalNativeFrameSize

    // Move the frame size to T2.
    mv    t2, a0

    // Restore args, the hidden arg and caller PC.
    RESTORE_GPR t0, (ALL_ARGS_SIZE + 0)
    RESTORE_GPR ra, (ALL_ARGS_SIZE + 8)
    RESTORE_ALL_ARGS_DECREASE_FRAME 2*8

    // Reserve space for a SaveRefsAndArgs managed frame, either for the actual runtime
    // method or for a GenericJNI frame which is similar but has a native method and a tag.
    // Add space for RA and padding to keep the stack 16-byte aligned.
    INCREASE_FRAME (FRAME_SIZE_SAVE_REFS_AND_ARGS + 16)

    // Prepare the return address for managed stack walk of the SaveRefsAndArgs frame.
    // If we're coming from a JNI stub with a tail call, it is RA. If we're coming from
    // a JNI stub that saved the return address, it will be the last value we copy below.
    // If we're coming directly from compiled code, it is RA, set further down.
    mv    t4, ra

    // Move the stack args if any. Calculate the base address of the managed frame in the process.
    addi  t1, sp, 16
    beqz  t2, .Lcritical_skip_copy_args
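    // T2 (the frame size computed above) counts the remaining bytes of stack args. Each
    // iteration moves 16 bytes from FRAME_SIZE_SAVE_REFS_AND_ARGS above the cursor T1 to
    // 16 bytes below it; when the loop ends, T1 points at the new managed frame base.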
.Lcritical_copy_args_loop:
    ld    t3, FRAME_SIZE_SAVE_REFS_AND_ARGS+0(t1)
    ld    t4, FRAME_SIZE_SAVE_REFS_AND_ARGS+8(t1)
    addi  t2, t2, -16
    sd    t3, 0-16(t1)
    sd    t4, 8-16(t1)
    addi  t1, t1, 16
    bnez  t2, .Lcritical_copy_args_loop
.Lcritical_skip_copy_args:

    // Spill registers for the SaveRefsAndArgs frame above the stack args.
    // Note that the runtime shall not examine the args here, otherwise we would have to
    // move them in registers and stack to account for the difference between managed and
    // native ABIs. Do not update CFI while we hold the frame address in T1 and the values
    // in registers are unchanged.
    // stack slot (0*8)(t1) is for ArtMethod*
    fsd   fa0, (1*8)(t1)
    fsd   fa1, (2*8)(t1)
    fsd   fa2, (3*8)(t1)
    fsd   fa3, (4*8)(t1)
    fsd   fa4, (5*8)(t1)
    fsd   fa5, (6*8)(t1)
    fsd   fa6, (7*8)(t1)
    fsd   fa7, (8*8)(t1)
    sd    fp,  (9*8)(t1)   // x8, frame pointer
    // s1 (x9) is the ART thread register
    // a0 (x10) is the method pointer
    sd    a1,  (10*8)(t1)  // x11
    sd    a2,  (11*8)(t1)  // x12
    sd    a3,  (12*8)(t1)  // x13
    sd    a4,  (13*8)(t1)  // x14
    sd    a5,  (14*8)(t1)  // x15
    sd    a6,  (15*8)(t1)  // x16
    sd    a7,  (16*8)(t1)  // x17
    sd    s2,  (17*8)(t1)  // x18
    sd    s3,  (18*8)(t1)  // x19
    sd    s4,  (19*8)(t1)  // x20
    sd    s5,  (20*8)(t1)  // x21
    sd    s6,  (21*8)(t1)  // x22
    sd    s7,  (22*8)(t1)  // x23
    sd    s8,  (23*8)(t1)  // x24
    sd    s9,  (24*8)(t1)  // x25
    sd    s10, (25*8)(t1)  // x26
    sd    s11, (26*8)(t1)  // x27
    sd    t4,  (27*8)(t1)  // t4: Save return address for tail call from JNI stub.
    // (If there were any stack args, we're storing the value that's already there.
    // For direct calls from compiled managed code, we shall overwrite this below.)

    // Move the managed frame address to native callee-save register fp (x8) and update CFI.
    mv    fp, t1
    // Skip args FA0-FA7, A1-A7.
    CFI_EXPRESSION_BREG  8, 8, (9*8)
    CFI_EXPRESSION_BREG 18, 8, (17*8)
    CFI_EXPRESSION_BREG 19, 8, (18*8)
    CFI_EXPRESSION_BREG 20, 8, (19*8)
    CFI_EXPRESSION_BREG 21, 8, (20*8)
    CFI_EXPRESSION_BREG 22, 8, (21*8)
    CFI_EXPRESSION_BREG 23, 8, (22*8)
    CFI_EXPRESSION_BREG 24, 8, (23*8)
    CFI_EXPRESSION_BREG 25, 8, (24*8)
    CFI_EXPRESSION_BREG 26, 8, (25*8)
    CFI_EXPRESSION_BREG 27, 8, (26*8)
    // The saved return PC for managed stack walk is not necessarily our RA.

    // Save our return PC below the managed frame.
    sd    ra, -__SIZEOF_POINTER__(fp)
    CFI_EXPRESSION_BREG 1, 8, -__SIZEOF_POINTER__

    lw    t2, ART_METHOD_ACCESS_FLAGS_OFFSET(t0)  // Load access flags.
    addi  t1, fp, 1        // Prepare managed SP tagged for a GenericJNI frame.
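    // Shift the kAccNative flag into the sign bit of the 32-bit result so that the
    // `bltz` below branches for native methods, keeping the tagged SP prepared above.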
    slliw t2, t2, 31 - ACCESS_FLAGS_METHOD_IS_NATIVE_BIT
    bltz  t2, .Lcritical_skip_prepare_runtime_method

    // When coming from a compiled method, the return PC for managed stack walk is RA.
    // (When coming from a compiled stub, the correct return PC is already stored above.)
    sd    ra, (FRAME_SIZE_SAVE_REFS_AND_ARGS - __SIZEOF_POINTER__)(fp)

    // Replace the target method with the SaveRefsAndArgs runtime method.
    LOAD_RUNTIME_INSTANCE t0
    ld    t0, RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET(t0)

    mv    t1, fp           // Prepare untagged managed SP for the runtime method.

.Lcritical_skip_prepare_runtime_method:
    // Store the method on the bottom of the managed frame.
    sd    t0, (fp)

    // Place the (maybe tagged) managed SP in Thread::Current()->top_quick_frame.
    sd    t1, THREAD_TOP_QUICK_FRAME_OFFSET(xSELF)

    // Preserve the native arg register A0 in callee-save register S2 (x18), which was saved above.
    mv    s2, a0

    // Call `artFindNativeMethodRunnable()`.
    mv    a0, xSELF   // pass Thread::Current()
    call  artFindNativeMethodRunnable

    // Store the result in a scratch reg.
    mv    t0, a0

    // Restore the native arg register A0.
    mv    a0, s2

    // Restore our return PC.
    RESTORE_GPR_BASE fp, ra, -__SIZEOF_POINTER__

    // Remember the end of out args before restoring FP.
    addi  t1, fp, -16
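    // (T1 is used below both to size the copy-back of stack args and, on the exception
    // path, to locate the base of the SaveAllCalleeSaves frame.)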

    // Restore arg registers.
    fld   fa0, (1*8)(fp)
    fld   fa1, (2*8)(fp)
    fld   fa2, (3*8)(fp)
    fld   fa3, (4*8)(fp)
    fld   fa4, (5*8)(fp)
    fld   fa5, (6*8)(fp)
    fld   fa6, (7*8)(fp)
    fld   fa7, (8*8)(fp)
    // fp (x8) is restored last to keep CFI data valid until then.
    // s1 (x9) is the ART thread register
    // a0 (x10) is the method pointer
    ld    a1,  (10*8)(fp)  // x11
    ld    a2,  (11*8)(fp)  // x12
    ld    a3,  (12*8)(fp)  // x13
    ld    a4,  (13*8)(fp)  // x14
    ld    a5,  (14*8)(fp)  // x15
    ld    a6,  (15*8)(fp)  // x16
    ld    a7,  (16*8)(fp)  // x17
    RESTORE_GPR_BASE fp, s2,  (17*8)  // x18
    RESTORE_GPR_BASE fp, s3,  (18*8)  // x19
    RESTORE_GPR_BASE fp, s4,  (19*8)  // x20
    RESTORE_GPR_BASE fp, s5,  (20*8)  // x21
    RESTORE_GPR_BASE fp, s6,  (21*8)  // x22
    RESTORE_GPR_BASE fp, s7,  (22*8)  // x23
    RESTORE_GPR_BASE fp, s8,  (23*8)  // x24
    RESTORE_GPR_BASE fp, s9,  (24*8)  // x25
    RESTORE_GPR_BASE fp, s10, (25*8)  // x26
    RESTORE_GPR_BASE fp, s11, (26*8)  // x27
    RESTORE_GPR_BASE fp, fp,  (9*8)   // fp (x8) is restored last

    // Check for exception before moving args back to keep the return PC for managed stack walk.
    CFI_REMEMBER_STATE
    beqz  t0, .Lcritical_deliver_exception

    // Move stack args to their original place.
    beq   t1, sp, .Lcritical_skip_copy_args_back
    sub   t2, t1, sp
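    // T2 := size of the stack args to move back. The loop walks T1 down, copying 16 bytes
    // per iteration from just below the cursor back to their original location
    // FRAME_SIZE_SAVE_REFS_AND_ARGS above it.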
.Lcritical_copy_args_back_loop:
    ld    t3, 0-16(t1)
    ld    t4, 8-16(t1)
    addi  t2, t2, -16
    sd    t3, FRAME_SIZE_SAVE_REFS_AND_ARGS+0(t1)
    sd    t4, FRAME_SIZE_SAVE_REFS_AND_ARGS+8(t1)
    addi  t1, t1, -16
    bnez  t2, .Lcritical_copy_args_back_loop
.Lcritical_skip_copy_args_back:

    // Remove the frame reservation.
    DECREASE_FRAME (FRAME_SIZE_SAVE_REFS_AND_ARGS + 16)

    // Do the tail call.
    jr    t0

.Lcritical_deliver_exception:
    CFI_RESTORE_STATE_AND_DEF_CFA sp, FRAME_SIZE_SAVE_REFS_AND_ARGS + 16
    // If this is called from a method that catches the exception, all callee-save registers need
    // to be saved, so that the exception handling code can read them in case they contain live
    // values later used by that method. This includes callee-save FP registers which are not
    // saved in a SaveRefsAndArgs frame, so we cannot reuse the managed frame we have built above.
    // That's why we checked for exception after restoring registers from that frame.
    // We need to build a SaveAllCalleeSaves frame instead. Args are irrelevant at this
    // point but keep the area allocated for stack args to keep CFA definition simple.
#if FRAME_SIZE_SAVE_ALL_CALLEE_SAVES > FRAME_SIZE_SAVE_REFS_AND_ARGS
#error "Expanding stack frame from kSaveRefsAndArgs to kSaveAllCalleeSaves is not implemented."
#endif
    DECREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS - FRAME_SIZE_SAVE_ALL_CALLEE_SAVES

    // Calculate the base address of the managed frame.
    addi  t1, t1, 16 + FRAME_SIZE_SAVE_REFS_AND_ARGS - FRAME_SIZE_SAVE_ALL_CALLEE_SAVES
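    // (T1 held the end of the out args, i.e. the old frame base - 16; the smaller
    // SaveAllCalleeSaves frame shares its top with the SaveRefsAndArgs frame it replaces.)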

    // Spill registers for the SaveAllCalleeSaves frame above the stack args area. Do not update
    // CFI while we hold the frame address in T1 and the values in registers are unchanged.
    // stack slot (0*8)(t1) is for ArtMethod*
    // stack slot (1*8)(t1) is for padding
    // FP callee-saves.
    fsd   fs0,  (8*2)(t1)   // f8
    fsd   fs1,  (8*3)(t1)   // f9
    fsd   fs2,  (8*4)(t1)   // f18
    fsd   fs3,  (8*5)(t1)   // f19
    fsd   fs4,  (8*6)(t1)   // f20
    fsd   fs5,  (8*7)(t1)   // f21
    fsd   fs6,  (8*8)(t1)   // f22
    fsd   fs7,  (8*9)(t1)   // f23
    fsd   fs8,  (8*10)(t1)  // f24
    fsd   fs9,  (8*11)(t1)  // f25
    fsd   fs10, (8*12)(t1)  // f26
    fsd   fs11, (8*13)(t1)  // f27
    // GP callee-saves.
    sd    s0,  (8*14)(t1)  // x8/fp, frame pointer
    // s1 (x9) is the ART thread register
    sd    s2,  (8*15)(t1)  // x18
    sd    s3,  (8*16)(t1)  // x19
    sd    s4,  (8*17)(t1)  // x20
    sd    s5,  (8*18)(t1)  // x21
    sd    s6,  (8*19)(t1)  // x22
    sd    s7,  (8*20)(t1)  // x23
    sd    s8,  (8*21)(t1)  // x24
    sd    s9,  (8*22)(t1)  // x25
    sd    s10, (8*23)(t1)  // x26
    sd    s11, (8*24)(t1)  // x27
    // Keep the caller PC for managed stack walk.

    // Move the managed frame address to native callee-save register fp (x8) and update CFI.
    mv    fp, t1
    CFI_EXPRESSION_BREG  8, 8, (14*8)  // fp/x8: The base register for these CFI expressions.
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 8, 8, (8*2)    // fs0/f8
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 9, 8, (8*3)    // fs1/f9
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 18, 8, (8*4)   // fs2/f18
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 19, 8, (8*5)   // fs3/f19
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 20, 8, (8*6)   // fs4/f20
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 21, 8, (8*7)   // fs5/f21
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 22, 8, (8*8)   // fs6/f22
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 23, 8, (8*9)   // fs7/f23
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 24, 8, (8*10)  // fs8/f24
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 25, 8, (8*11)  // fs9/f25
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 26, 8, (8*12)  // fs10/f26
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 27, 8, (8*13)  // fs11/f27
    // CFI expression for fp (x8) already emitted above.
    CFI_EXPRESSION_BREG 18, 8, (15*8)  // s2/x18
    CFI_EXPRESSION_BREG 19, 8, (16*8)  // s3/x19
    CFI_EXPRESSION_BREG 20, 8, (17*8)  // s4/x20
    CFI_EXPRESSION_BREG 21, 8, (18*8)  // s5/x21
    CFI_EXPRESSION_BREG 22, 8, (19*8)  // s6/x22
    CFI_EXPRESSION_BREG 23, 8, (20*8)  // s7/x23
    CFI_EXPRESSION_BREG 24, 8, (21*8)  // s8/x24
    CFI_EXPRESSION_BREG 25, 8, (22*8)  // s9/x25
    CFI_EXPRESSION_BREG 26, 8, (23*8)  // s10/x26
    CFI_EXPRESSION_BREG 27, 8, (24*8)  // s11/x27
    // The saved return PC for managed stack walk is not necessarily our RA.

    // Save our return PC below the managed frame.
    sd    ra, -__SIZEOF_POINTER__(fp)
    CFI_EXPRESSION_BREG 1, 8, -__SIZEOF_POINTER__

    // Store ArtMethod* Runtime::callee_save_methods_[kSaveAllCalleeSaves] to the managed frame.
    LOAD_RUNTIME_INSTANCE t0
    ld    t0, RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET(t0)
    sd    t0, (fp)

    // Place the managed frame SP in Thread::Current()->top_quick_frame.
    sd    fp, THREAD_TOP_QUICK_FRAME_OFFSET(xSELF)

    DELIVER_PENDING_EXCEPTION_FRAME_READY
END art_jni_dlsym_lookup_critical_stub

    /*
     * Read barrier for the method's declaring class needed by JNI stub for static methods.
     * (We're using a pointer to the declaring class in `ArtMethod` as `jclass`.)
     */
// The method argument is already in a0 for call to `artJniReadBarrier(ArtMethod*)`.
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_read_barrier, artJniReadBarrier

    /*
     * Trampoline to `artJniMethodStart()` that preserves all managed arguments.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_method_start, artJniMethodStart, xSELF

    /*
     * Trampoline to `artJniMethodEntryHook()` that preserves all managed arguments.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_method_entry_hook, artJniMethodEntryHook, xSELF

    /*
     * Trampoline to `artJniMonitoredMethodStart()` that preserves all managed arguments.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_monitored_method_start, artJniMonitoredMethodStart, xSELF

    /*
     * Trampoline to `artJniMethodEnd()` that preserves all return registers.
     */
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_method_end, artJniMethodEnd, xSELF

    /*
     * Trampoline to `artJniMonitoredMethodEnd()` that preserves all return registers.
     */
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_monitored_method_end, artJniMonitoredMethodEnd, xSELF

    /*
     * Entry from JNI stub that tries to lock the object in a fast path and
     * calls `artLockObjectFromCode()` (the same as for managed code) for the
     * difficult cases; may block for GC.
     * Custom calling convention:
     *     T0 holds the non-null object to lock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     All argument registers need to be preserved.
     */
ENTRY art_jni_lock_object
    LOCK_OBJECT_FAST_PATH t0, art_jni_lock_object_no_inline, /*can_be_null*/ 0
END art_jni_lock_object

    /*
     * Entry from JNI stub that calls `artLockObjectFromCode()`
     * (the same as for managed code); may block for GC.
     * Custom calling convention:
     *     T0 holds the non-null object to lock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     All argument registers need to be preserved.
     */
    .extern artLockObjectFromCode
ENTRY art_jni_lock_object_no_inline
    // This is also the slow path for art_jni_lock_object.
    // Save args and RA.
    SAVE_ALL_ARGS_INCREASE_FRAME /*padding*/ 8 + /*RA*/ 8
    SAVE_GPR ra, (ALL_ARGS_SIZE + /*padding*/ 8)
    // Call `artLockObjectFromCode()`.
    mv    a0, t0                     // Pass the object to lock.
    mv    a1, xSELF                  // Pass Thread::Current().
    call  artLockObjectFromCode      // (Object* obj, Thread*)
    // Restore return address.
    RESTORE_GPR ra, (ALL_ARGS_SIZE + /*padding*/ 8)
    // Check result.
    bnez   a0, 1f
    // Restore register args a0-a7, fa0-fa7 and return.
    RESTORE_ALL_ARGS_DECREASE_FRAME /*padding*/ 8 + /*RA*/ 8
    ret
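    // Re-declare the CFA offset for the code below: label 1 is reached with the args
    // spill area still on the stack, while the `ret` path above has already removed it.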
    .cfi_adjust_cfa_offset (ALL_ARGS_SIZE + /*padding*/ 8 + /*RA*/ 8)
1:
    // All args are irrelevant when throwing an exception. Remove the spill area.
    DECREASE_FRAME (ALL_ARGS_SIZE + /*padding*/ 8 + /*RA*/ 8)
    // Make a call to `artDeliverPendingExceptionFromCode()`.
    // Rely on the JNI transition frame constructed in the JNI stub.
    mv     a0, xSELF                           // Pass Thread::Current().
    call   artDeliverPendingExceptionFromCode  // (Thread*)
    call   art_quick_do_long_jump              // (Context*)
    unimp  // Unreached
END art_jni_lock_object_no_inline

    /*
     * Entry from JNI stub that tries to unlock the object in a fast path and calls
     * `artJniUnlockObject()` for the difficult cases. Note that failure to unlock
     * is fatal, so we do not need to check for exceptions in the slow path.
     * Custom calling convention:
     *     T0 holds the non-null object to unlock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     Return registers a0 and fa0 need to be preserved.
     */
ENTRY art_jni_unlock_object
    UNLOCK_OBJECT_FAST_PATH t0, art_jni_unlock_object_no_inline, /*can_be_null*/ 0
END art_jni_unlock_object

    /*
     * Entry from JNI stub that calls `artJniUnlockObject()`. Note that failure to
     * unlock is fatal, so we do not need to check for exceptions.
     * Custom calling convention:
     *     T0 holds the non-null object to unlock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     Return registers a0 and fa0 need to be preserved.
     */
    // This is also the slow path for art_jni_unlock_object.
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_unlock_object_no_inline, artJniUnlockObject, t0, xSELF