%def unused():
   ebreak
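   // Descriptive note (comment added for clarity, inferred from usage below):
   // unused() is expanded for every unallocated opcode in this file, so a
   // stray dispatch into one of those slots hits ebreak and traps immediately.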

// nop
// Format 10x: 00|00
%def op_nop():
   FETCH_ADVANCE_INST 1  // advance xPC, load xINST
   GET_INST_OPCODE t0    // t0 holds next opcode
   GOTO_OPCODE t0        // continue to next

// move vA, vB
// Format 12x: B|A|01
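// Worked example (illustrative values, not from the source): "move v3, v7"
// assembles to the code unit 0x7301, so with xINST = 0x7301 the srliw by 12
// below yields t1 = 0x7 (B), srliw by 8 yields t2 = 0x73 (B|A), and the later
// "and t2, t2, 0xF" recovers A = 0x3.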
%def op_move(is_object=False, is_wide=False):
   srliw t1, xINST, 12   // t1 := B
   srliw t2, xINST, 8    // t2 := B|A
%  if is_object:
     // Note: leaves a useful breadcrumb if the reference is corrupted, unlike GET_VREG_OBJECT.
%    get_vreg("t1", "t1", is_unsigned=True)  # t1 := fp[B], zext
%  else:
%    get_vreg("t1", "t1", is_wide=is_wide)  # t1 := fp[B]
%#:
   and t2, t2, 0xF       // t2 := A
   FETCH_ADVANCE_INST 1  // advance xPC, load xINST
   GET_INST_OPCODE t3    // t3 holds next opcode
%  if is_object:
     SET_VREG_OBJECT t1, t2, z0=t0  // refs[A] := fp[B]
%  else:
%    set_vreg("t1", "t2", z0="t0", is_wide=is_wide)  # fp[A] := fp[B]
%#:
   GOTO_OPCODE t3        // continue to next

// move/from16 vAA, vBBBB
// Format 22x: AA|02 BBBB
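// (Descriptive comment, inferred from FETCH's uses in this file: count selects
// the 16-bit code unit at offset count from the opcode's code unit, so
// count=1 below reads BBBB.)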
%def op_move_from16(is_object=False, is_wide=False):
   FETCH t1, count=1     // t1 := BBBB
   srliw t2, xINST, 8    // t2 := AA
%  if is_object:
     // Note: leaves a useful breadcrumb if the reference is corrupted, unlike GET_VREG_OBJECT.
%    get_vreg("t1", "t1", is_unsigned=True)  # t1 := fp[BBBB], zext
%  else:
%    get_vreg("t1", "t1", is_wide=is_wide)  # t1 := fp[BBBB]
%#:
   FETCH_ADVANCE_INST 2  // advance xPC, load xINST
   GET_INST_OPCODE t3    // t3 := next opcode
%  if is_object:
     SET_VREG_OBJECT t1, t2, z0=t0  // refs[AA] := fp[BBBB]
%  else:
%    set_vreg("t1", "t2", z0="t0", is_wide=is_wide)  # fp[AA] := fp[BBBB]
%#:
   GOTO_OPCODE t3        // continue to next

// move/16 vAAAA, vBBBB
// Format 32x: 00|03 AAAA BBBB
%def op_move_16(is_object=False, is_wide=False):
   FETCH t1, count=2     // t1 := BBBB
   FETCH t2, count=1     // t2 := AAAA
%  if is_object:
     // Note: leaves a useful breadcrumb if the reference is corrupted, unlike GET_VREG_OBJECT.
%    get_vreg("t1", "t1", is_unsigned=True)  # t1 := fp[BBBB], zext
%  else:
%    get_vreg("t1", "t1", is_wide=is_wide)  # t1 := fp[BBBB]
%#:
   FETCH_ADVANCE_INST 3  // advance xPC, load xINST
   GET_INST_OPCODE t3    // t3 := next opcode
%  if is_object:
     SET_VREG_OBJECT t1, t2, z0=t0  // refs[AAAA] := fp[BBBB]
%  else:
%    set_vreg("t1", "t2", z0="t0", is_wide=is_wide)  # fp[AAAA] := fp[BBBB]
%#:
   GOTO_OPCODE t3        // continue to next

// move-wide vA, vB
// Format 12x: B|A|04
// NOTE: vregs can overlap, e.g. "move-wide v6,v7" or "move-wide v7,v6"
%def op_move_wide():
%  op_move(is_wide=True)

// move-wide/from16 vAA, vBBBB
// Format 22x: AA|05 BBBB
// NOTE: vregs can overlap, e.g. "move-wide v6,v7" or "move-wide v7,v6"
%def op_move_wide_from16():
%  op_move_from16(is_wide=True)

// move-wide/16 vAAAA, vBBBB
// Format 32x: 00|06 AAAA BBBB
// NOTE: vregs can overlap, e.g. "move-wide v6,v7" or "move-wide v7,v6"
%def op_move_wide_16():
%  op_move_16(is_wide=True)

// move-object vA, vB
// Format 12x: B|A|07
%def op_move_object():
%  op_move(is_object=True)

// move-object/from16 vAA, vBBBB
// Format 22x: AA|08 BBBB
%def op_move_object_from16():
%  op_move_from16(is_object=True)

// move-object/16 vAAAA, vBBBB
// Format 32x: 00|09 AAAA BBBB
%def op_move_object_16():
%  op_move_16(is_object=True)

// move-result vAA
// Format 11x: AA|0a
%def op_move_result(is_object=False, is_wide=False):
   srliw t1, xINST, 8    // t1 := AA
   FETCH_ADVANCE_INST 1  // advance xPC, load xINST
   GET_INST_OPCODE t2    // t2 := next opcode
%  if is_object:
     SET_VREG_OBJECT a0, t1, z0=t0  // refs[AA] := a0
%  else:
%    set_vreg("a0", "t1", z0="t0", is_wide=is_wide)  # fp[AA] := a0
%#:
   GOTO_OPCODE t2        // continue to next

// move-result-wide vAA
// Format 11x: AA|0b
%def op_move_result_wide():
%  op_move_result(is_wide=True)

// move-result-object vAA
// Format 11x: AA|0c
%def op_move_result_object():
%  op_move_result(is_object=True)

// move-exception vAA
// Format 11x: AA|0d
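// Note (added comment, inferred from the instruction order below): the
// exception reference is stored into refs[AA] before the thread-local
// exception slot is cleared, so the object remains GC-visible throughout.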
%def op_move_exception():
   ld t1, THREAD_EXCEPTION_OFFSET(xSELF)    // t1 := exception object
   srliw t2, xINST, 8                       // t2 := AA
   FETCH_ADVANCE_INST 1                     // advance xPC, load xINST
   SET_VREG_OBJECT t1, t2, z0=t0            // refs[AA] := exception object
   GET_INST_OPCODE t3                       // t3 := next opcode
   sd zero, THREAD_EXCEPTION_OFFSET(xSELF)  // clear exception
   GOTO_OPCODE t3                           // continue to next

// const/4 vA, #+B
// Format 11n: B|A|12
// Clobbers: t0, t1, t2, t3
%def op_const_4():
   slliw t1, xINST, 16     // B as MSB of word
   sraiw t1, t1, 28        // t1 := sssssssB
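   // Worked example (illustrative): "const/4 v0, #-2" is the code unit 0xe012;
   // slliw 16 gives 0xe0120000, and sraiw 28 sign-extends the top nibble to
   // t1 = 0xfffffffe (-2).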
   slliw t2, xINST, 20     // A as MSB of word
   srliw t2, t2, 28        // t2 := A
   FETCH_ADVANCE_INST 1    // advance xPC, load xINST
   GET_INST_OPCODE t3      // t3 holds next opcode
%  set_vreg("t1", "t2", z0="t0")  # fp[A] := sssssssB
   GOTO_OPCODE t3          // continue to next

// const/16 vAA, #+BBBB
// Format 21s: AA|13 BBBB
// Clobbers: t0, t1, t2, t3
%def op_const_16(is_wide=False):
   FETCH t1, count=1, signed=1
                         // t1 := ssssssssssssBBBB
   srliw t2, xINST, 8    // t2 := AA
   FETCH_ADVANCE_INST 2  // advance xPC, load xINST
   GET_INST_OPCODE t3    // t3 := next opcode
%  set_vreg("t1", "t2", z0="t0", is_wide=is_wide)
                         // fp[AA] := +BBBB
   GOTO_OPCODE t3        // continue to next

// const vAA, #+BBBBBBBB
// Format 31i: AA|14 BBBB(lo) BBBB(hi)
// Clobbers: t0, t1, t2, t3
%def op_const(is_wide=False):
   FETCH t1, count=1, signed=1, width=32
                         // t1 := ssssssssBBBBBBBB
   srliw t2, xINST, 8    // t2 := AA
   FETCH_ADVANCE_INST 3  // advance xPC, load xINST
   GET_INST_OPCODE t3    // t3 := next opcode
%  set_vreg("t1", "t2", z0="t0", is_wide=is_wide)
                         // fp[AA] := +BBBBBBBB
   GOTO_OPCODE t3        // continue to next

// const/high16 vAA, #+BBBB0000
// Format 21h: AA|15 BBBB
// Clobbers: t0, t1, t2, t3
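// Worked example (illustrative): "const/high16 v0, #0x41000000" encodes
// BBBB = 0x4100; the slliw below rebuilds 0x41000000, which reads back as
// 8.0f when vAA is used as a float.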
%def op_const_high16():
   FETCH t1, count=1       // t1 := BBBB
   srliw t2, xINST, 8      // t2 := AA
   slliw t1, t1, 16        // t1 := BBBB0000
   FETCH_ADVANCE_INST 2    // advance xPC, load xINST
   GET_INST_OPCODE t3      // t3 := next opcode
%  set_vreg("t1", "t2", z0="t0")  # fp[AA] := BBBB0000
   GOTO_OPCODE t3          // continue to next

// const-wide/16 vAA, #+BBBB
// Format 21s: AA|16 BBBB
%def op_const_wide_16():
%  op_const_16(is_wide=True)

// const-wide/32 vAA, #+BBBBBBBB
// Format 31i: AA|17 BBBB(lo) BBBB(hi)
%def op_const_wide_32():
%  op_const(is_wide=True)

// const-wide vAA, #+BBBBBBBBBBBBBBBB
// Format 51l: AA|18 BBBB(lo) BBBB BBBB BBBB(hi)
%def op_const_wide():
   FETCH t1, count=1, width=64
                         // t1 := BBBBBBBBBBBBBBBB
   srliw t2, xINST, 8    // t2 := AA
   FETCH_ADVANCE_INST 5  // advance xPC, load xINST
   GET_INST_OPCODE t3    // t3 := next opcode
   SET_VREG_WIDE t1, t2, z0=t0
                         // fp[AA] := BBBBBBBBBBBBBBBB
   GOTO_OPCODE t3        // continue to next

// const-wide/high16 vAA, #+BBBB000000000000
// Format 21h: AA|19 BBBB
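// Worked example (illustrative): BBBB = 0x4010 shifted left by 48 yields
// 0x4010000000000000, which reads back as 4.0 when the register pair is used
// as a double.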
%def op_const_wide_high16():
   FETCH t1, count=1     // t1 := BBBB
   srliw t2, xINST, 8    // t2 := AA
   slli t1, t1, 48       // t1 := BBBB000000000000
   FETCH_ADVANCE_INST 2  // advance xPC, load xINST
   GET_INST_OPCODE t3    // t3 := next opcode
   SET_VREG_WIDE t1, t2, z0=t0
                         // fp[AA] := BBBB000000000000
   GOTO_OPCODE t3        // continue to next


// const-string vAA, string@BBBB
// Format 21c: AA|1a BBBB
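// Control-flow summary (added comment, describing the code below): try the
// nterp thread-local cache first; on a cache miss, take the slow path and
// call nterp_load_object; if the GC is concurrently marking, route the loaded
// reference through the read barrier before storing it into refs[AA].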
%def op_const_string(jumbo=False):
   // Fast path: string from thread-local cache.
   FETCH_FROM_THREAD_CACHE /*object*/a0, .L${opcode}_slow, t0, t1
   TEST_IF_MARKING t2, .L${opcode}_mark

.L${opcode}_resume:
   srliw t0, xINST, 8  // t0 := AA
%  code_units = "3" if jumbo else "2"
   FETCH_ADVANCE_INST $code_units
   SET_VREG_OBJECT a0, t0, z0=t1
   GET_INST_OPCODE t0
   GOTO_OPCODE t0

.L${opcode}_mark:
   call art_quick_read_barrier_mark_reg10  // a0, string
   j .L${opcode}_resume
.L${opcode}_slow:
   EXPORT_PC
   mv a0, xSELF
   ld a1, (sp)  // caller ArtMethod*
   mv a2, xPC
   call nterp_load_object  // return a0 := string
   j .L${opcode}_resume


// const-string/jumbo vAA, string@BBBBBBBB
// Format 31c: AA|1b BBBB(lo) BBBB(hi)
%def op_const_string_jumbo():
%  op_const_string(jumbo=True)

// const-class vAA, type@BBBB
// Format 21c: AA|1c BBBB
%def op_const_class():
   // Fast path: klass reference from thread-local cache.
   FETCH_FROM_THREAD_CACHE /*object*/a0, .L${opcode}_slow, t0, t1
   TEST_IF_MARKING t2, .L${opcode}_mark

.L${opcode}_resume:
   srliw t0, xINST, 8  // t0 := AA
   FETCH_ADVANCE_INST 2
   SET_VREG_OBJECT a0, t0, z0=t1
   GET_INST_OPCODE t0
   GOTO_OPCODE t0

.L${opcode}_mark:
   call art_quick_read_barrier_mark_reg10  // a0, klass
   j .L${opcode}_resume
.L${opcode}_slow:
   EXPORT_PC
   mv a0, xSELF
   ld a1, (sp)  // caller ArtMethod*
   mv a2, xPC
   call nterp_get_class  // return a0 := klass
   j .L${opcode}_resume


// const-method-handle vAA, method_handle@BBBB
// Format 21c: AA|fe BBBB
%def op_const_method_handle():
   // Method handles and method types are not cached; call the helper directly.
   EXPORT_PC
   mv a0, xSELF
   ld a1, (sp)  // caller ArtMethod*
   mv a2, xPC
   call nterp_load_object  // return a0 := method handle or method type
   srliw t0, xINST, 8  // t0 := AA
   FETCH_ADVANCE_INST 2
   SET_VREG_OBJECT a0, t0, z0=t1
   GET_INST_OPCODE t0
   GOTO_OPCODE t0


// const-method-type vAA, proto@BBBB
// Format 21c: AA|ff BBBB
%def op_const_method_type():
%  op_const_method_handle()


// monitor-enter vAA
// Format 11x: AA|1d
// Acquire the monitor for the indicated object.
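// (Added comment: EXPORT_PC publishes the current dex pc before the runtime
// call, so an exception thrown by art_quick_lock_object, e.g. an NPE for a
// null vAA, is attributed to this instruction.)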
%def op_monitor_enter():
   EXPORT_PC
   srliw t0, xINST, 8  // t0 := AA
   GET_VREG_OBJECT a0, t0
   call art_quick_lock_object  // arg a0
   FETCH_ADVANCE_INST 1
   GET_INST_OPCODE t0
   GOTO_OPCODE t0


// monitor-exit vAA
// Format 11x: AA|1e
// Release the monitor for the indicated object.
// Note: If this instruction needs to throw an exception, it must do so as if the pc has
//       already advanced past the instruction. It may be useful to think of this as the instruction
//       successfully executing (in a sense), and the exception getting thrown after the instruction
//       but before the next one gets a chance to run. This definition makes it possible for a
//       method to use a monitor cleanup catch-all (e.g., finally) block as the monitor cleanup for
//       that block itself, as a way to handle the arbitrary exceptions that might get thrown due to
//       the historical implementation of Thread.stop(), while still managing to have proper monitor
//       hygiene.
%def op_monitor_exit():
   EXPORT_PC
   srliw t0, xINST, 8  // t0 := AA
   GET_VREG_OBJECT a0, t0
   call art_quick_unlock_object  // arg a0
   FETCH_ADVANCE_INST 1
   GET_INST_OPCODE t0
   GOTO_OPCODE t0


%def op_unused_3e():
%  unused()

%def op_unused_3f():
%  unused()

%def op_unused_40():
%  unused()

%def op_unused_41():
%  unused()

%def op_unused_42():
%  unused()

%def op_unused_43():
%  unused()

%def op_unused_73():
%  unused()

%def op_unused_79():
%  unused()

%def op_unused_7a():
%  unused()

%def op_unused_e3():
%  unused()

%def op_unused_e4():
%  unused()

%def op_unused_e5():
%  unused()

%def op_unused_e6():
%  unused()

%def op_unused_e7():
%  unused()

%def op_unused_e8():
%  unused()

%def op_unused_e9():
%  unused()

%def op_unused_ea():
%  unused()

%def op_unused_eb():
%  unused()

%def op_unused_ec():
%  unused()

%def op_unused_ed():
%  unused()

%def op_unused_ee():
%  unused()

%def op_unused_ef():
%  unused()

%def op_unused_f0():
%  unused()

%def op_unused_f1():
%  unused()

%def op_unused_f2():
%  unused()

%def op_unused_f3():
%  unused()

%def op_unused_f4():
%  unused()

%def op_unused_f5():
%  unused()

%def op_unused_f6():
%  unused()

%def op_unused_f7():
%  unused()

%def op_unused_f8():
%  unused()

%def op_unused_f9():
%  unused()

%def op_unused_fc():
%  unused()

%def op_unused_fd():
%  unused()