%def unused():
    /*
     * Handler stub for opcode slots with no assigned instruction: trap with a
     * breakpoint so a stray dispatch is caught immediately rather than
     * executing whatever follows.
     */
    int3
3
%def op_const():
    /* const vAA, #+BBBBbbbb */
    /* 32-bit literal: one unaligned load picks up both 16-bit code units. */
    movl    2(rPC), %eax                    # grab all 32 bits at once
    SET_VREG %eax, rINST                    # vAA<- eax
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3        # instruction is 3 code units long
9
%def op_const_16():
    /* const/16 vAA, #+BBBB */
    /* 16-bit literal, sign-extended to 32 bits by the movswl load. */
    movswl  2(rPC), %ecx                    # ecx <- ssssBBBB
    SET_VREG %ecx, rINST                    # vAA <- ssssBBBB
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
15
%def op_const_4():
    /* const/4 vA, #+B */
    /* rINSTbl packs the literal B in the high nibble and the register number
     * A in the low nibble. */
    movsbl  rINSTbl, %eax                   # eax <- sign-extended byte (ssssssBA)
    andl    MACRO_LITERAL(0xf), rINST       # rINST <- A
    sarl    MACRO_LITERAL(4), %eax          # arithmetic shift keeps B's sign: eax <- ssss...sB
    SET_VREG %eax, rINST                    # vA <- sign-extended B
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
23
%def op_const_high16():
    /* const/high16 vAA, #+BBBB0000 */
    /* The 16-bit literal forms the high half of the value; low bits are 0. */
    movzwl  2(rPC), %eax                    # eax <- 0000BBBB
    sall    MACRO_LITERAL(16), %eax         # eax <- BBBB0000
    SET_VREG %eax, rINST                    # vAA <- eax
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
30
%def op_const_object(jumbo="0", helper="nterp_load_object"):
   /*
    * Shared template for const-string/class/method-handle/method-type.
    * Fast path: fetch the already-resolved object for this dex PC from the
    * thread-local cache; on a miss fall back to $helper, then retry the store.
    */
   // Fast-path which gets the object from thread-local cache.
%  fetch_from_thread_cache("%eax", miss_label="2f")
   // If the thread's read-barrier-mark entrypoint for reg 00 is installed
   // (non-zero), the reference in %eax must be marked before it is used.
   cmpl MACRO_LITERAL(0), rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
   jne 3f
1:
   SET_VREG_OBJECT %eax, rINST             # vAA <- value
   .if $jumbo
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 3        # jumbo form: 32-bit index, 3 code units
   .else
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2        # 16-bit index, 2 code units
   .endif
2:
   // Slow path (cache miss): call $helper(self, <stack slot 0>, dex_pc).
   EXPORT_PC
   movl rSELF:THREAD_SELF_OFFSET, ARG0
   movl 0(%esp), ARG1                      # NOTE(review): presumably the current ArtMethod* saved at the stack top — confirm
   movl rPC, ARG2
   call SYMBOL($helper)
   jmp 1b                                  # helper's result is in %eax; go store it
3:
   // 00 is %eax
   call art_quick_read_barrier_mark_reg00  # marks/updates the reference in %eax
   jmp 1b
54
%def op_const_class():
   /* const-class vAA, type@BBBB */
%  op_const_object(jumbo="0", helper="nterp_get_class")
57
%def op_const_method_handle():
   /* const-method-handle vAA, method_handle@BBBB */
%  op_const_object(jumbo="0")
60
%def op_const_method_type():
   /* const-method-type vAA, proto@BBBB */
%  op_const_object(jumbo="0")
63
%def op_const_string():
   /* const/string vAA, String@BBBB */
%  op_const_object(jumbo="0")
67
%def op_const_string_jumbo():
   /* const/string/jumbo vAA, String@BBBBBBBB (32-bit string index) */
%  op_const_object(jumbo="1")
71
%def op_const_wide():
    /* const-wide vAA, #+HHHHhhhhBBBBbbbb */
    movl    2(rPC), %eax                    # eax <- lsw (BBBBbbbb)
    movzbl  rINSTbl, %ecx                   # ecx <- AA (dest register pair)
    movl    6(rPC), rINST                   # rINST <- msw (HHHHhhhh); AA already copied out
    SET_VREG %eax, %ecx                     # v[AA+0] <- lsw
    SET_VREG_HIGH  rINST, %ecx              # v[AA+1] <- msw
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 5        # 5 code units: opcode + 64-bit literal
80
%def op_const_wide_16():
    /* const-wide/16 vAA, #+BBBB */
    /* cdq sign-extends eax into edx; rIBASE aliases %edx, hence the save. */
    movswl  2(rPC), %eax                    # eax <- ssssBBBB
    movl    rIBASE, %ecx                    # preserve rIBASE (cdq trashes it)
    cdq                                     # rIBASE:eax <- ssssssssssssBBBB
    SET_VREG_HIGH rIBASE, rINST             # store msw
    SET_VREG %eax, rINST                    # store lsw
    movl    %ecx, rIBASE                    # restore rIBASE
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
90
%def op_const_wide_32():
    /* const-wide/32 vAA, #+BBBBbbbb */
    /* cdq sign-extends eax into edx; rIBASE aliases %edx, hence the save. */
    movl    2(rPC), %eax                    # eax <- BBBBbbbb
    movl    rIBASE, %ecx                    # preserve rIBASE (cdq trashes it)
    cdq                                     # rIBASE:eax <- ssssssssBBBBbbbb (was mislabelled ssssssssssssBBBB)
    SET_VREG_HIGH rIBASE, rINST             # store msw
    SET_VREG %eax, rINST                    # store lsw
    movl    %ecx, rIBASE                    # restore rIBASE
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
100
%def op_const_wide_high16():
    /* const-wide/high16 vAA, #+BBBB000000000000 */
    /* Literal forms bits 48-63; the other 48 bits are zero. */
    movzwl  2(rPC), %eax                    # eax <- 0000BBBB
    sall    $$16, %eax                      # eax <- BBBB0000
    SET_VREG_HIGH %eax, rINST               # v[AA+1] <- eax
    xorl    %eax, %eax                      # eax <- 0 for the low word
    SET_VREG %eax, rINST                    # v[AA+0] <- eax
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
109
%def op_monitor_enter():
/*
 * Synchronize on an object.
 */
    /* monitor-enter vAA */
    EXPORT_PC                               # make dex PC visible before a call that may throw/suspend
    GET_VREG ARG0, rINST                    # ARG0 <- object reference in vAA
    call art_quick_lock_object
    RESTORE_IBASE                           # the call may clobber rIBASE; reload it
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
120
%def op_monitor_exit():
/*
 * Unlock an object.
 *
 * Exceptions that occur when unlocking a monitor need to appear as
 * if they happened at the following instruction.  See the Dalvik
 * instruction spec.
 */
    /* monitor-exit vAA */
    EXPORT_PC                               # make dex PC visible before a call that may throw
    GET_VREG ARG0, rINST                    # ARG0 <- object reference in vAA
    call art_quick_unlock_object
    RESTORE_IBASE                           # the call may clobber rIBASE; reload it
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
135
%def op_move(is_object="0"):
    /* for move, move-object, long-to-int */
    /* op vA, vB */
    /* rINST holds the byte BA: B in the high nibble, A in the low nibble. */
    movl    rINST, %eax                     # eax <- BA
    andb    $$0xf, %al                      # eax <- A
    shrl    $$4, rINST                      # rINST <- B
    GET_VREG %ecx, rINST                    # ecx <- fp[B]
    .if $is_object
    SET_VREG_OBJECT %ecx, %eax              # fp[A] <- fp[B] (object variant)
    .else
    SET_VREG %ecx, %eax                     # fp[A] <- fp[B]
    .endif
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
149
%def op_move_16(is_object="0"):
    /* for: move/16, move-object/16 */
    /* op vAAAA, vBBBB -- both register numbers are full 16-bit operands */
    movzwl  4(rPC), %ecx                    # ecx <- BBBB
    movzwl  2(rPC), %eax                    # eax <- AAAA
    GET_VREG %ecx, %ecx                     # ecx <- fp[BBBB]
    .if $is_object
    SET_VREG_OBJECT %ecx, %eax              # fp[AAAA] <- fp[BBBB] (object variant)
    .else
    SET_VREG %ecx, %eax                     # fp[AAAA] <- fp[BBBB]
    .endif
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
162
%def op_move_exception():
    /* move-exception vAA */
    /* Store the thread's pending exception into vAA, then clear the slot so
     * the exception is no longer considered pending. */
    movl    rSELF:THREAD_EXCEPTION_OFFSET, %eax   # eax <- pending exception
    SET_VREG_OBJECT %eax, rINST            # fp[AA] <- exception object
    movl    $$0, rSELF:THREAD_EXCEPTION_OFFSET    # clear pending exception
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
169
%def op_move_from16(is_object="0"):
    /* for: move/from16, move-object/from16 */
    /* op vAA, vBBBB -- 8-bit dest (in rINST), 16-bit source */
    movzwl  2(rPC), %eax                    # eax <- BBBB
    GET_VREG %ecx, %eax                     # ecx <- fp[BBBB]
    .if $is_object
    SET_VREG_OBJECT %ecx, rINST             # fp[AA] <- fp[BBBB] (object variant)
    .else
    SET_VREG %ecx, rINST                    # fp[AA] <- fp[BBBB]
    .endif
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
181
%def op_move_object():
    /* move-object vA, vB */
%  op_move(is_object="1")
184
%def op_move_object_16():
    /* move-object/16 vAAAA, vBBBB */
%  op_move_16(is_object="1")
187
%def op_move_object_from16():
    /* move-object/from16 vAA, vBBBB */
%  op_move_from16(is_object="1")
190
%def op_move_result(is_object="0"):
    /* for: move-result, move-result-object */
    /* op vAA */
    /* The 32-bit result of the preceding invoke is already in %eax; nothing
     * is loaded here. */
    .if $is_object
    SET_VREG_OBJECT %eax, rINST             # vAA <- result (object)
    .else
    SET_VREG %eax, rINST                    # vAA <- result
    .endif
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
200
%def op_move_result_object():
    /* move-result-object vAA */
%  op_move_result(is_object="1")
203
%def op_move_result_wide():
    /* move-result-wide vAA */
    /* The low half of the pending wide result is in %eax; store it first,
     * because LOAD_WIDE_RETURN reuses %eax for the high half. */
    SET_VREG %eax, rINST                    # v[AA+0] <- low 32 bits
    LOAD_WIDE_RETURN %eax                   # eax <- high 32 bits of the result
    SET_VREG_HIGH %eax, rINST               # v[AA+1] <- high 32 bits
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
210
%def op_move_wide():
    /* move-wide vA, vB */
    /* NOTE: regs can overlap, e.g. "move v6,v7" or "move v7,v6" */
    /* A single 64-bit xmm load + store copies the pair correctly even when
     * the source and destination register pairs overlap. */
    movzbl  rINSTbl, %ecx                   # ecx <- BA
    sarl    $$4, rINST                      # rINST <- B
    andb    $$0xf, %cl                      # ecx <- A
    GET_WIDE_FP_VREG %xmm0, rINST           # xmm0 <- v[B]
    SET_WIDE_FP_VREG %xmm0, %ecx            # v[A] <- xmm0
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
220
%def op_move_wide_16():
    /* move-wide/16 vAAAA, vBBBB */
    /* NOTE: regs can overlap, e.g. "move v6,v7" or "move v7,v6" */
    /* 64-bit xmm copy handles overlapping pairs safely. */
    movzwl  4(rPC), %ecx                    # ecx<- BBBB
    movzwl  2(rPC), %eax                    # eax<- AAAA
    GET_WIDE_FP_VREG %xmm0, %ecx            # xmm0 <- v[BBBB]
    SET_WIDE_FP_VREG %xmm0, %eax            # v[AAAA] <- xmm0
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
229
%def op_move_wide_from16():
    /* move-wide/from16 vAA, vBBBB */
    /* NOTE: regs can overlap, e.g. "move v6,v7" or "move v7,v6" */
    /* 64-bit xmm copy handles overlapping pairs safely. */
    movzwl  2(rPC), %ecx                    # ecx <- BBBB
    movzbl  rINSTbl, %eax                   # eax <- AA (8-bit dest register; old comment said AAAA)
    GET_WIDE_FP_VREG %xmm0, %ecx            # xmm0 <- v[BBBB]
    SET_WIDE_FP_VREG %xmm0, %eax            # v[AA] <- xmm0
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
238
%def op_nop():
    /* nop: no operation; just advance past the single code unit. */
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
241
%def op_unused_3e():
    /* Opcode slots 3e-43, 73, 79-7a, e3-f9 and fc-fd have no instruction
     * assigned; every handler below traps via unused() (int3). */
%  unused()

%def op_unused_3f():
%  unused()

%def op_unused_40():
%  unused()

%def op_unused_41():
%  unused()

%def op_unused_42():
%  unused()

%def op_unused_43():
%  unused()

%def op_unused_73():
%  unused()

%def op_unused_79():
%  unused()

%def op_unused_7a():
%  unused()

%def op_unused_e3():
%  unused()

%def op_unused_e4():
%  unused()

%def op_unused_e5():
%  unused()

%def op_unused_e6():
%  unused()

%def op_unused_e7():
%  unused()

%def op_unused_e8():
%  unused()

%def op_unused_e9():
%  unused()

%def op_unused_ea():
%  unused()

%def op_unused_eb():
%  unused()

%def op_unused_ec():
%  unused()

%def op_unused_ed():
%  unused()

%def op_unused_ee():
%  unused()

%def op_unused_ef():
%  unused()

%def op_unused_f0():
%  unused()

%def op_unused_f1():
%  unused()

%def op_unused_f2():
%  unused()

%def op_unused_f3():
%  unused()

%def op_unused_f4():
%  unused()

%def op_unused_f5():
%  unused()

%def op_unused_f6():
%  unused()

%def op_unused_f7():
%  unused()

%def op_unused_f8():
%  unused()

%def op_unused_f9():
%  unused()

%def op_unused_fc():
%  unused()

%def op_unused_fd():
%  unused()
343