%def bincmp(revcmp=""):
/*
 * Generic two-operand compare-and-branch operation.  Provide a "revcmp"
 * fragment that specifies the *reverse* comparison to perform, e.g.
 * for "if-le" you would use "gt".
 *
 * For: if-eq, if-ne, if-lt, if-ge, if-gt, if-le
 */
    /* if-cmp vA, vB, +CCCC */
    movl    rINST, %ecx                     # ecx <- BA (copy; low nibble extracted below)
    sarl    $$4, rINST                      # rINST <- B (high nibble of the reg byte)
    andb    $$0xf, %cl                      # ecx <- A (mask to low nibble)
    GET_VREG %eax, %ecx                     # eax <- vA
    cmpl    VREG_ADDRESS(rINST), %eax       # compare (vA, vB)
    j${revcmp} 1f                           # reverse test: jump AROUND the branch when it fails
    movswl  2(rPC), rINST                   # rINST <- signed branch offset, in 16-bit code units
    BRANCH
1:
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2        # branch not taken: fall through to next insn
20
%def zcmp(revcmp=""):
/*
 * Generic one-operand compare-and-branch operation.  Provide a "revcmp"
 * fragment that specifies the *reverse* comparison to perform, e.g.
 * for "if-le" you would use "gt".
 *
 * for: if-eqz, if-nez, if-ltz, if-gez, if-gtz, if-lez
 */
    /* if-cmp vAA, +BBBB */
    cmpl    $$0, VREG_ADDRESS(rINST)        # compare (vAA, 0)
    j${revcmp} 1f                           # reverse test: jump AROUND the branch when it fails
    movswl  2(rPC), rINST                   # rINST <- signed branch offset, in 16-bit code units
    BRANCH
1:
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2        # branch not taken: fall through to next insn
36
%def op_goto():
/*
 * Unconditional branch, 8-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset.
 */
    /* goto +AA */
    movsbl  rINSTbl, rINST                  # rINST <- ssssssAA (sign-extend 8-bit offset)
    BRANCH
47
%def op_goto_16():
/*
 * Unconditional branch, 16-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset.
 */
    /* goto/16 +AAAA */
    movswl  2(rPC), rINST                   # rINST <- ssssAAAA (sign-extend 16-bit offset)
    BRANCH
58
%def op_goto_32():
/*
 * Unconditional branch, 32-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset.
 */
    /* goto/32 +AAAAAAAA */
    movl  2(rPC), rINST                   # rINST <- AAAAAAAA (full 32 bits; no extension needed)
    BRANCH
69
%def op_if_eq():
/* if-eq vA, vB, +CCCC: branch when vA == vB ("ne" is the reverse test that skips it). */
%  bincmp(revcmp="ne")
72
%def op_if_eqz():
/* if-eqz vAA, +BBBB: branch when vAA == 0 ("ne" is the reverse test that skips it). */
%  zcmp(revcmp="ne")
75
%def op_if_ge():
/* if-ge vA, vB, +CCCC: branch when vA >= vB, signed ("l" is the reverse test). */
%  bincmp(revcmp="l")
78
%def op_if_gez():
/* if-gez vAA, +BBBB: branch when vAA >= 0, signed ("l" is the reverse test). */
%  zcmp(revcmp="l")
81
%def op_if_gt():
/* if-gt vA, vB, +CCCC: branch when vA > vB, signed ("le" is the reverse test). */
%  bincmp(revcmp="le")
84
%def op_if_gtz():
/* if-gtz vAA, +BBBB: branch when vAA > 0, signed ("le" is the reverse test). */
%  zcmp(revcmp="le")
87
%def op_if_le():
/* if-le vA, vB, +CCCC: branch when vA <= vB, signed ("g" is the reverse test). */
%  bincmp(revcmp="g")
90
%def op_if_lez():
/* if-lez vAA, +BBBB: branch when vAA <= 0, signed ("g" is the reverse test). */
%  zcmp(revcmp="g")
93
%def op_if_lt():
/* if-lt vA, vB, +CCCC: branch when vA < vB, signed ("ge" is the reverse test). */
%  bincmp(revcmp="ge")
96
%def op_if_ltz():
/* if-ltz vAA, +BBBB: branch when vAA < 0, signed ("ge" is the reverse test). */
%  zcmp(revcmp="ge")
99
%def op_if_ne():
/* if-ne vA, vB, +CCCC: branch when vA != vB ("e" is the reverse test that skips it). */
%  bincmp(revcmp="e")
102
%def op_if_nez():
/* if-nez vAA, +BBBB: branch when vAA != 0 ("e" is the reverse test that skips it). */
%  zcmp(revcmp="e")
105
%def op_packed_switch(func="NterpDoPackedSwitch"):
/*
 * Handle a packed-switch or sparse-switch instruction.  In both cases
 * we decode it and hand it off to a helper function.
 *
 * We don't really expect backward branches in a switch statement, but
 * they're perfectly legal, so we check for them here.
 *
 * for: packed-switch, sparse-switch
 */
    /* op vAA, +BBBB */
    movl    2(rPC), ARG0                # eax <- BBBBbbbb (offset to the switch payload)
    leal    (rPC,ARG0,2), ARG0          # eax <- PC + BBBBbbbb*2 (payload address, code units -> bytes)
    GET_VREG ARG1, rINST                # ecx <- vAA (value being switched on)
    pushl   ARG1                        # cdecl: push args right-to-left
    pushl   ARG0
    call    SYMBOL($func)               # eax <- branch offset chosen by the helper
    addl MACRO_LITERAL(8), %esp         # pop the two argument words
    RESTORE_IBASE                       # helper call may have clobbered the dispatch base
    FETCH_INST_CLEAR_OPCODE
    movl  %eax, rINST                   # rINST <- branch offset, in 16-bit code units
    BRANCH
128
/*
 * Return a 32-bit value.
 */
%def op_return(is_object="0"):
    GET_VREG %eax, rINST                    # eax <- vAA (the return value)
    .if !$is_object
    // In case we're going back to compiled code, put the
    // result also in a xmm register.
    movd %eax, %xmm0
    .endif
    CFI_REMEMBER_STATE
    movl -4(rREFS), %esp                    # esp <- saved stack pointer (stored just below rREFS)
    DROP_PARAMETERS_SAVES
    CFI_DEF_CFA(esp, CALLEE_SAVES_SIZE)
    RESTORE_ALL_CALLEE_SAVES
    ret
    CFI_RESTORE_STATE                       # CFI below is for the not-taken path in siblings
    NTERP_DEF_CFA CFI_REFS
147
%def op_return_object():
/* Same as op_return, but is_object=1 skips the xmm0 copy (references are not FP values). */
%  op_return(is_object="1")
150
%def op_return_void():
    // Thread fence for constructor is a no-op on x86 (strong memory ordering; TSO).
    CFI_REMEMBER_STATE
    movl -4(rREFS), %esp                    # esp <- saved stack pointer (stored just below rREFS)
    DROP_PARAMETERS_SAVES
    CFI_DEF_CFA(esp, CALLEE_SAVES_SIZE)
    RESTORE_ALL_CALLEE_SAVES
    ret
    CFI_RESTORE_STATE
    NTERP_DEF_CFA CFI_REFS
161
%def op_return_wide():
    // In case we're going back to compiled code, put the
    // result also in a xmm register.
    GET_WIDE_FP_VREG %xmm0, rINST           # xmm0 <- 64-bit value from vAA/vAA+1
    GET_VREG %eax, rINST        # eax <- vAA (low word)
    GET_VREG_HIGH %edx, rINST   # edx <- vAA+1 (high word); wide result returned in edx:eax
    CFI_REMEMBER_STATE
    movl    -4(rREFS), %esp                 # esp <- saved stack pointer (stored just below rREFS)
    DROP_PARAMETERS_SAVES
    CFI_DEF_CFA(esp, CALLEE_SAVES_SIZE)
    RESTORE_ALL_CALLEE_SAVES
    ret
    CFI_RESTORE_STATE
    NTERP_DEF_CFA CFI_REFS
176
%def op_sparse_switch():
/* Identical to op_packed_switch, but dispatches through the sparse-switch helper. */
%  op_packed_switch(func="NterpDoSparseSwitch")
179
%def op_throw():
  EXPORT_PC                              # make the current dex PC visible to the runtime
  GET_VREG ARG0, rINST                   # eax <- vAA (exception object)
  movl rSELF:THREAD_SELF_OFFSET, ARG1    # arg1 <- Thread::Current()
  call SYMBOL(art_quick_deliver_exception)
  int3                                   # unreachable: exception delivery does not return
186