1/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "asm_support_riscv64.S"
18
19
20// 8 argument GPRS: a0 - a7 and 8 argument FPRs: fa0 - fa7
21#define ALL_ARGS_SIZE (8 * (8 + 8))
22
23
// Grow the stack frame by ALL_ARGS_SIZE + \extra_space bytes and spill all
// sixteen argument registers (GPRs a0 - a7, then FPRs fa0 - fa7) into the
// newly reserved area at the bottom of the frame. The caller owns the extra
// space above the spilled args. CFI is updated by INCREASE_FRAME; the
// argument spills themselves need no CFI.
.macro SAVE_ALL_ARGS_INCREASE_FRAME extra_space
    // Make room for the sixteen 8-byte argument registers plus the caller's extra space.
    INCREASE_FRAME (ALL_ARGS_SIZE + \extra_space)

    // Spill argument GPRs a0 - a7 at offsets 0 - 56.
    sd    a0, 0(sp)
    sd    a1, 8(sp)
    sd    a2, 16(sp)
    sd    a3, 24(sp)
    sd    a4, 32(sp)
    sd    a5, 40(sp)
    sd    a6, 48(sp)
    sd    a7, 56(sp)

    // Spill argument FPRs fa0 - fa7 at offsets 64 - 120.
    fsd   fa0, 64(sp)
    fsd   fa1, 72(sp)
    fsd   fa2, 80(sp)
    fsd   fa3, 88(sp)
    fsd   fa4, 96(sp)
    fsd   fa5, 104(sp)
    fsd   fa6, 112(sp)
    fsd   fa7, 120(sp)
.endm
48
49
// Reload all sixteen argument registers (GPRs a0 - a7, then FPRs fa0 - fa7)
// from the spill area created by SAVE_ALL_ARGS_INCREASE_FRAME, then shrink
// the frame by the matching amount. \extra_space must equal the value passed
// to the save macro. CFI is updated by DECREASE_FRAME.
.macro RESTORE_ALL_ARGS_DECREASE_FRAME extra_space
    // Reload argument GPRs a0 - a7 from offsets 0 - 56.
    ld    a0, 0(sp)
    ld    a1, 8(sp)
    ld    a2, 16(sp)
    ld    a3, 24(sp)
    ld    a4, 32(sp)
    ld    a5, 40(sp)
    ld    a6, 48(sp)
    ld    a7, 56(sp)

    // Reload argument FPRs fa0 - fa7 from offsets 64 - 120.
    fld   fa0, 64(sp)
    fld   fa1, 72(sp)
    fld   fa2, 80(sp)
    fld   fa3, 88(sp)
    fld   fa4, 96(sp)
    fld   fa5, 104(sp)
    fld   fa6, 112(sp)
    fld   fa7, 120(sp)

    // Release the spill area plus the caller's extra space.
    DECREASE_FRAME (ALL_ARGS_SIZE + \extra_space)
.endm
73
74
// Generates a trampoline `\name` that calls `\cxx_name` while preserving all
// managed argument registers (a0 - a7, fa0 - fa7) and RA across the call.
// If `\arg1` is given, it is moved into a0 as the sole argument of
// `\cxx_name`; otherwise a0 is passed through unchanged. The call's return
// value in a0 is discarded because a0 is reloaded from the spill area before
// returning, so `\cxx_name` should be a void-like routine here.
.macro JNI_SAVE_MANAGED_ARGS_TRAMPOLINE name, cxx_name, arg1 = "none"
    .extern \cxx_name
ENTRY \name
    // Save args and RA. The 8 bytes of padding keep the frame 16-byte aligned
    // (ALL_ARGS_SIZE is a multiple of 16, so padding + RA adds a full 16 bytes).
    SAVE_ALL_ARGS_INCREASE_FRAME /*padding*/ 8 + /*RA*/ 8
    SAVE_GPR ra, (ALL_ARGS_SIZE + /*padding*/ 8)
    // Call `cxx_name()`.
    .ifnc \arg1, none
        mv    a0, \arg1
    .endif
    call   \cxx_name
    // Restore RA and args and return.
    RESTORE_GPR ra, (ALL_ARGS_SIZE + /*padding*/ 8)
    RESTORE_ALL_ARGS_DECREASE_FRAME /*padding*/ 8 + /*RA*/ 8
    ret
END \name
.endm
92
93
// Generates a trampoline `\name` that calls `\cxx_name` while preserving the
// managed return value registers (a0 and fa0) and RA across the call.
// `\arg1` (required) is moved into a0 and `\arg2` (optional) into a1 before
// the call; the call's own return value is discarded when a0 is reloaded.
.macro JNI_SAVE_RETURN_VALUE_TRAMPOLINE name, cxx_name, arg1, arg2 = "none"
    .extern \cxx_name
ENTRY \name
    // Save return registers and return address.
    // Frame layout: a0 at 0, fa0 at 8, padding at 16 (keeps the 32-byte frame
    // 16-byte aligned), RA at 24.
    INCREASE_FRAME 32
    sd    a0, 0(sp)
    fsd   fa0, 8(sp)
    SAVE_GPR ra, 24
    // Call `cxx_name()`.
    mv    a0, \arg1
    .ifnc \arg2, none
        mv    a1, \arg2
    .endif
    call  \cxx_name
    // Restore result registers and return.
    ld    a0, 0(sp)
    fld   fa0, 8(sp)
    RESTORE_GPR ra, 24
    DECREASE_FRAME 32
    ret
END \name
.endm
116
117
// JNI dlsym lookup stub.
// Looks up the native implementation for the method in the thread's top quick
// frame while preserving all managed argument registers, then tail-calls the
// resolved code. If the lookup returns null, returns to the caller (the JNI
// stub), which is responsible for handling the pending exception.
.extern artFindNativeMethod
.extern artFindNativeMethodRunnable
ENTRY art_jni_dlsym_lookup_stub
    // Spill all argument registers plus FP and RA so the C++ lookup call can
    // clobber them freely.
    SAVE_ALL_ARGS_INCREASE_FRAME 2*8
    SAVE_GPR fp, (ALL_ARGS_SIZE + 0)
    SAVE_GPR ra, (ALL_ARGS_SIZE + 8)
    add  fp, sp, ALL_ARGS_SIZE

    // Call artFindNativeMethod for normal native.
    // Call artFindNativeMethodRunnable for @FastNative or @CriticalNative.
    // Both functions have a single argument: Thread::Current() in a0.
    mv   a0, xSELF
    ld   t0, THREAD_TOP_QUICK_FRAME_OFFSET(a0)   // uintptr_t tagged_quick_frame
    andi t0, t0, ~TAGGED_JNI_SP_MASK             // ArtMethod** sp
    ld   t0, (t0)                                // ArtMethod* method
    lw   t0, ART_METHOD_ACCESS_FLAGS_OFFSET(t0)  // uint32_t access_flags
    li   t1, (ACCESS_FLAGS_METHOD_IS_FAST_NATIVE | ACCESS_FLAGS_METHOD_IS_CRITICAL_NATIVE)
    and  t0, t0, t1
    bnez t0, .Llookup_stub_fast_or_critical_native
    call artFindNativeMethod
    j    .Llookup_stub_continue

.Llookup_stub_fast_or_critical_native:
    call  artFindNativeMethodRunnable

.Llookup_stub_continue:
    // t0 is a scratch register not part of the spill area, so it survives the
    // register restoration below.
    mv    t0, a0  // store result in a temp reg.
    RESTORE_GPR fp, (ALL_ARGS_SIZE + 0)
    RESTORE_GPR ra, (ALL_ARGS_SIZE + 8)
    RESTORE_ALL_ARGS_DECREASE_FRAME 2*8

    beqz  t0, 1f  // is method code null?
    jr    t0      // if non-null, tail call to method code.
1:
    ret           // restore regs and return to caller to handle exception.
END art_jni_dlsym_lookup_stub
155
156
// JNI dlsym lookup stub for @CriticalNative.
// Custom calling convention: the hidden argument register t0 holds the tagged
// target method. Builds a managed frame so the runtime can walk the stack
// during the lookup, then tail-calls the resolved native code or delivers the
// pending exception.
//
// Fix: emit the missing CFI expression for fs11/f27, which is spilled into the
// SaveAllCalleeSaves frame below but previously had no unwind info, so the
// exception unwinder could not restore it.
ENTRY art_jni_dlsym_lookup_critical_stub
    // The hidden arg holding the tagged method is t0 (loaded by compiled JNI stub, compiled
    // managed code, or `art_quick_generic_jni_trampoline`). Bit 0 set means generic JNI.
    // For generic JNI we already have a managed frame, so we reuse the art_jni_dlsym_lookup_stub.
    andi  t6, t0, 1
    bnez  t6, art_jni_dlsym_lookup_stub

    // Save args, the hidden arg and caller PC. No CFI needed for args and the hidden arg.
    SAVE_ALL_ARGS_INCREASE_FRAME 2*8
    SAVE_GPR t0, (ALL_ARGS_SIZE + 0)
    SAVE_GPR ra, (ALL_ARGS_SIZE + 8)

    // Call artCriticalNativeFrameSize(method, caller_pc)
    mv    a0, t0  // a0 := method (from hidden arg)
    mv    a1, ra  // a1 := caller_pc
    call  artCriticalNativeFrameSize

    // Move frame size to T2.
    mv    t2, a0

    // Restore args, the hidden arg and caller PC.
    RESTORE_GPR t0, (ALL_ARGS_SIZE + 0)
    RESTORE_GPR ra, (ALL_ARGS_SIZE + 8)
    RESTORE_ALL_ARGS_DECREASE_FRAME 2*8

    // Reserve space for a SaveRefsAndArgs managed frame, either for the actual runtime
    // method or for a GenericJNI frame which is similar but has a native method and a tag.
    // Add space for RA and padding to keep the stack 16-byte aligned.
    INCREASE_FRAME (FRAME_SIZE_SAVE_REFS_AND_ARGS + 16)

    // Prepare the return address for managed stack walk of the SaveRefsAndArgs frame.
    // If we're coming from JNI stub with tail call, it is RA. If we're coming from
    // JNI stub that saved the return address, it will be the last value we copy below.
    // If we're coming directly from compiled code, it is RA, set further down.
    mv    t4, ra

    // Move the stack args if any. Calculate the base address of the managed frame in the process.
    addi  t1, sp, 16
    beqz  t2, .Lcritical_skip_copy_args
.Lcritical_copy_args_loop:
    ld    t3, FRAME_SIZE_SAVE_REFS_AND_ARGS+0(t1)
    ld    t4, FRAME_SIZE_SAVE_REFS_AND_ARGS+8(t1)
    addi  t2, t2, -16
    sd    t3, 0-16(t1)
    sd    t4, 8-16(t1)
    addi  t1, t1, 16
    bnez  t2, .Lcritical_copy_args_loop
.Lcritical_skip_copy_args:

    // Spill registers for the SaveRefsAndArgs frame above the stack args.
    // Note that the runtime shall not examine the args here, otherwise we would have to
    // move them in registers and stack to account for the difference between managed and
    // native ABIs. Do not update CFI while we hold the frame address in T1 and the values
    // in registers are unchanged.
    // stack slot (0*8)(t1) is for ArtMethod*
    fsd   fa0, (1*8)(t1)
    fsd   fa1, (2*8)(t1)
    fsd   fa2, (3*8)(t1)
    fsd   fa3, (4*8)(t1)
    fsd   fa4, (5*8)(t1)
    fsd   fa5, (6*8)(t1)
    fsd   fa6, (7*8)(t1)
    fsd   fa7, (8*8)(t1)
    sd    fp,  (9*8)(t1)   // x8, frame pointer
    // s1 (x9) is the ART thread register
    // a0 (x10) is the method pointer
    sd    a1,  (10*8)(t1)  // x11
    sd    a2,  (11*8)(t1)  // x12
    sd    a3,  (12*8)(t1)  // x13
    sd    a4,  (13*8)(t1)  // x14
    sd    a5,  (14*8)(t1)  // x15
    sd    a6,  (15*8)(t1)  // x16
    sd    a7,  (16*8)(t1)  // x17
    sd    s2,  (17*8)(t1)  // x18
    sd    s3,  (18*8)(t1)  // x19
    sd    s4,  (19*8)(t1)  // x20
    sd    s5,  (20*8)(t1)  // x21
    sd    s6,  (21*8)(t1)  // x22
    sd    s7,  (22*8)(t1)  // x23
    sd    s8,  (23*8)(t1)  // x24
    sd    s9,  (24*8)(t1)  // x25
    sd    s10, (25*8)(t1)  // x26
    sd    s11, (26*8)(t1)  // x27
    sd    t4,  (27*8)(t1)  // t4: Save return address for tail call from JNI stub.
    // (If there were any stack args, we're storing the value that's already there.
    // For direct calls from compiled managed code, we shall overwrite this below.)

    // Move the managed frame address to native callee-save register fp (x8) and update CFI.
    mv    fp, t1
    // Skip args FA0-FA7, A1-A7
    CFI_EXPRESSION_BREG  8, 8, (9*8)
    CFI_EXPRESSION_BREG 18, 8, (17*8)
    CFI_EXPRESSION_BREG 19, 8, (18*8)
    CFI_EXPRESSION_BREG 20, 8, (19*8)
    CFI_EXPRESSION_BREG 21, 8, (20*8)
    CFI_EXPRESSION_BREG 22, 8, (21*8)
    CFI_EXPRESSION_BREG 23, 8, (22*8)
    CFI_EXPRESSION_BREG 24, 8, (23*8)
    CFI_EXPRESSION_BREG 25, 8, (24*8)
    CFI_EXPRESSION_BREG 26, 8, (25*8)
    CFI_EXPRESSION_BREG 27, 8, (26*8)
    // The saved return PC for managed stack walk is not necessarily our RA.

    // Save our return PC below the managed frame.
    sd    ra, -__SIZEOF_POINTER__(fp)
    CFI_EXPRESSION_BREG 1, 8, -__SIZEOF_POINTER__

    lw    t2, ART_METHOD_ACCESS_FLAGS_OFFSET(t0)  // Load access flags.
    addi  t1, fp, 1        // Prepare managed SP tagged for a GenericJNI frame.
    slliw t2, t2, 31 - ACCESS_FLAGS_METHOD_IS_NATIVE_BIT
    bltz  t2, .Lcritical_skip_prepare_runtime_method

    // When coming from a compiled method, the return PC for managed stack walk is RA.
    // (When coming from a compiled stub, the correct return PC is already stored above.)
    sd    ra, (FRAME_SIZE_SAVE_REFS_AND_ARGS - __SIZEOF_POINTER__)(fp)

    // Replace the target method with the SaveRefsAndArgs runtime method.
    LOAD_RUNTIME_INSTANCE t0
    ld    t0, RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET(t0)

    mv    t1, fp           // Prepare untagged managed SP for the runtime method.

.Lcritical_skip_prepare_runtime_method:
    // Store the method on the bottom of the managed frame.
    sd    t0, (fp)

    // Place (maybe tagged) managed SP in Thread::Current()->top_quick_frame.
    sd    t1, THREAD_TOP_QUICK_FRAME_OFFSET(xSELF)

    // Preserve the native arg register A0 in callee-save register S2 (x18) which was saved above.
    mv    s2, a0

    // Call artFindNativeMethodRunnable()
    mv    a0, xSELF   // pass Thread::Current()
    call  artFindNativeMethodRunnable

    // Store result in scratch reg.
    mv    t0, a0

    // Restore the native arg register A0.
    mv    a0, s2

    // Restore our return PC.
    RESTORE_GPR_BASE fp, ra, -__SIZEOF_POINTER__

    // Remember the end of out args before restoring FP.
    addi  t1, fp, -16

    // Restore arg registers.
    fld   fa0, (1*8)(fp)
    fld   fa1, (2*8)(fp)
    fld   fa2, (3*8)(fp)
    fld   fa3, (4*8)(fp)
    fld   fa4, (5*8)(fp)
    fld   fa5, (6*8)(fp)
    fld   fa6, (7*8)(fp)
    fld   fa7, (8*8)(fp)
    // fp (x8) is restored last to keep CFI data valid until then.
    // s1 (x9) is the ART thread register
    // a0 (x10) is the method pointer
    ld    a1,  (10*8)(fp)  // x11
    ld    a2,  (11*8)(fp)  // x12
    ld    a3,  (12*8)(fp)  // x13
    ld    a4,  (13*8)(fp)  // x14
    ld    a5,  (14*8)(fp)  // x15
    ld    a6,  (15*8)(fp)  // x16
    ld    a7,  (16*8)(fp)  // x17
    RESTORE_GPR_BASE fp, s2,  (17*8)  // x18
    RESTORE_GPR_BASE fp, s3,  (18*8)  // x19
    RESTORE_GPR_BASE fp, s4,  (19*8)  // x20
    RESTORE_GPR_BASE fp, s5,  (20*8)  // x21
    RESTORE_GPR_BASE fp, s6,  (21*8)  // x22
    RESTORE_GPR_BASE fp, s7,  (22*8)  // x23
    RESTORE_GPR_BASE fp, s8,  (23*8)  // x24
    RESTORE_GPR_BASE fp, s9,  (24*8)  // x25
    RESTORE_GPR_BASE fp, s10, (25*8)  // x26
    RESTORE_GPR_BASE fp, s11, (26*8)  // x27
    RESTORE_GPR_BASE fp, fp,  (9*8)   // fp (x8) is restored last

    // Check for exception before moving args back to keep the return PC for managed stack walk.
    CFI_REMEMBER_STATE
    beqz  t0, .Lcritical_deliver_exception

    // Move stack args to their original place.
    beq   t1, sp, .Lcritical_skip_copy_args_back
    sub   t2, t1, sp
.Lcritical_copy_args_back_loop:
    ld    t3, 0-16(t1)
    ld    t4, 8-16(t1)
    addi  t2, t2, -16
    sd    t3, FRAME_SIZE_SAVE_REFS_AND_ARGS+0(t1)
    sd    t4, FRAME_SIZE_SAVE_REFS_AND_ARGS+8(t1)
    addi  t1, t1, -16
    bnez  t2, .Lcritical_copy_args_back_loop
.Lcritical_skip_copy_args_back:

    // Remove the frame reservation.
    DECREASE_FRAME (FRAME_SIZE_SAVE_REFS_AND_ARGS + 16)

    // Do the tail call.
    jr    t0

.Lcritical_deliver_exception:
    CFI_RESTORE_STATE_AND_DEF_CFA sp, FRAME_SIZE_SAVE_REFS_AND_ARGS + 16
    // If this is called from a method that catches the exception, all callee-save registers need
    // to be saved, so that the exception handling code can read them in case they contain live
    // values later used by that method. This includes callee-save FP registers which are not
    // saved in a SaveRefsAndArgs frame, so we cannot reuse the managed frame we have built above.
    // That's why we checked for exception after restoring registers from that frame.
    // We need to build a SaveAllCalleeSaves frame instead. Args are irrelevant at this
    // point but keep the area allocated for stack args to keep CFA definition simple.
#if FRAME_SIZE_SAVE_ALL_CALLEE_SAVES > FRAME_SIZE_SAVE_REFS_AND_ARGS
#error "Expanding stack frame from kSaveRefsAndArgs to kSaveAllCalleeSaves is not implemented."
#endif
    DECREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS - FRAME_SIZE_SAVE_ALL_CALLEE_SAVES

    // Calculate the base address of the managed frame.
    addi  t1, t1, 16 + FRAME_SIZE_SAVE_REFS_AND_ARGS - FRAME_SIZE_SAVE_ALL_CALLEE_SAVES

    // Spill registers for the SaveAllCalleeSaves frame above the stack args area. Do not update
    // CFI while we hold the frame address in T1 and the values in registers are unchanged.
    // stack slot (0*8)(t1) is for ArtMethod*
    // stack slot (1*8)(t1) is for padding
    // FP callee-saves.
    fsd   fs0,  (8*2)(t1)   // f8
    fsd   fs1,  (8*3)(t1)   // f9
    fsd   fs2,  (8*4)(t1)   // f18
    fsd   fs3,  (8*5)(t1)   // f19
    fsd   fs4,  (8*6)(t1)   // f20
    fsd   fs5,  (8*7)(t1)   // f21
    fsd   fs6,  (8*8)(t1)   // f22
    fsd   fs7,  (8*9)(t1)   // f23
    fsd   fs8,  (8*10)(t1)  // f24
    fsd   fs9,  (8*11)(t1)  // f25
    fsd   fs10, (8*12)(t1)  // f26
    fsd   fs11, (8*13)(t1)  // f27
    // GP callee-saves
    sd    s0,  (8*14)(t1)  // x8/fp, frame pointer
    // s1 (x9) is the ART thread register
    sd    s2,  (8*15)(t1)  // x18
    sd    s3,  (8*16)(t1)  // x19
    sd    s4,  (8*17)(t1)  // x20
    sd    s5,  (8*18)(t1)  // x21
    sd    s6,  (8*19)(t1)  // x22
    sd    s7,  (8*20)(t1)  // x23
    sd    s8,  (8*21)(t1)  // x24
    sd    s9,  (8*22)(t1)  // x25
    sd    s10, (8*23)(t1)  // x26
    sd    s11, (8*24)(t1)  // x27
    // Keep the caller PC for managed stack walk.

    // Move the managed frame address to native callee-save register fp (x8) and update CFI.
    mv    fp, t1
    CFI_EXPRESSION_BREG  8, 8, (14*8)  // fp/x8: The base register for these CFI expressions.
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 8, 8, (8*2)    // fs0/f8
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 9, 8, (8*3)    // fs1/f9
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 18, 8, (8*4)   // fs2/f18
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 19, 8, (8*5)   // fs3/f19
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 20, 8, (8*6)   // fs4/f20
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 21, 8, (8*7)   // fs5/f21
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 22, 8, (8*8)   // fs6/f22
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 23, 8, (8*9)   // fs7/f23
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 24, 8, (8*10)  // fs8/f24
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 25, 8, (8*11)  // fs9/f25
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 26, 8, (8*12)  // fs10/f26
    // fs11/f27 is saved at (8*13) above; without this CFI expression the
    // unwinder could not restore it during exception delivery.
    CFI_EXPRESSION_BREG  /*FP reg*/ 32 + 27, 8, (8*13)  // fs11/f27
    // CFI expression for fp (x8) already emitted above.
    CFI_EXPRESSION_BREG 18, 8, (15*8)  // s2/x18
    CFI_EXPRESSION_BREG 19, 8, (16*8)  // s3/x19
    CFI_EXPRESSION_BREG 20, 8, (17*8)  // s4/x20
    CFI_EXPRESSION_BREG 21, 8, (18*8)  // s5/x21
    CFI_EXPRESSION_BREG 22, 8, (19*8)  // s6/x22
    CFI_EXPRESSION_BREG 23, 8, (20*8)  // s7/x23
    CFI_EXPRESSION_BREG 24, 8, (21*8)  // s8/x24
    CFI_EXPRESSION_BREG 25, 8, (22*8)  // s9/x25
    CFI_EXPRESSION_BREG 26, 8, (23*8)  // s10/x26
    CFI_EXPRESSION_BREG 27, 8, (24*8)  // s11/x27
    // The saved return PC for managed stack walk is not necessarily our RA.

    // Save our return PC below the managed frame.
    sd    ra, -__SIZEOF_POINTER__(fp)
    CFI_EXPRESSION_BREG 1, 8, -__SIZEOF_POINTER__

    // Store ArtMethod* Runtime::callee_save_methods_[kSaveAllCalleeSaves] to the managed frame.
    LOAD_RUNTIME_INSTANCE t0
    ld    t0, RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET(t0)
    sd    t0, (fp)

    // Place the managed frame SP in Thread::Current()->top_quick_frame.
    sd    fp, THREAD_TOP_QUICK_FRAME_OFFSET(xSELF)

    DELIVER_PENDING_EXCEPTION_FRAME_READY
END art_jni_dlsym_lookup_critical_stub
450
    /*
     * Read barrier for the method's declaring class needed by JNI stub for static methods.
     * (We're using a pointer to the declaring class in `ArtMethod` as `jclass`.)
     */
// The method argument is already in a0 for call to `artJniReadBarrier(ArtMethod*)`,
// so no explicit arg1 is passed to the macro here.
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_read_barrier, artJniReadBarrier

    /*
     * Trampoline to `artJniMethodStart()` that preserves all managed arguments.
     * Thread::Current() is passed in a0.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_method_start, artJniMethodStart, xSELF

    /*
     * Trampoline to `artJniMethodEntryHook` that preserves all managed arguments.
     * Thread::Current() is passed in a0.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_method_entry_hook, artJniMethodEntryHook, xSELF

    /*
     * Trampoline to `artJniMonitoredMethodStart()` that preserves all managed arguments.
     * Thread::Current() is passed in a0.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_monitored_method_start, artJniMonitoredMethodStart, xSELF

    /*
     * Trampoline to `artJniMethodEnd()` that preserves all return registers.
     * Thread::Current() is passed in a0.
     */
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_method_end, artJniMethodEnd, xSELF

    /*
     * Trampoline to `artJniMonitoredMethodEnd()` that preserves all return registers.
     * Thread::Current() is passed in a0.
     */
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_monitored_method_end, artJniMonitoredMethodEnd, xSELF
482
    /*
     * Entry from JNI stub that tries to lock the object in a fast path and
     * calls `artLockObjectFromCode()` (the same as for managed code) for the
     * difficult cases, may block for GC.
     * Custom calling convention:
     *     T0 holds the non-null object to lock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     All argument registers need to be preserved.
     */
ENTRY art_jni_lock_object
    // Inline fast path; the macro transfers control to the slow-path entry
    // point `art_jni_lock_object_no_inline` (below) when the fast path fails.
    LOCK_OBJECT_FAST_PATH t0, art_jni_lock_object_no_inline, /*can_be_null*/ 0
END art_jni_lock_object
495
    /*
     * Entry from JNI stub that calls `artLockObjectFromCode()`
     * (the same as for managed code), may block for GC.
     * Custom calling convention:
     *     T0 holds the non-null object to lock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     All argument registers need to be preserved.
     * Returns normally on success; on failure (non-zero result from
     * `artLockObjectFromCode()`) tail-calls the exception delivery routine.
     */
    .extern artLockObjectFromCode
ENTRY art_jni_lock_object_no_inline
    // This is also the slow path for art_jni_lock_object.
    // Save args and RA.
    SAVE_ALL_ARGS_INCREASE_FRAME /*padding*/ 8 + /*RA*/ 8
    SAVE_GPR ra, (ALL_ARGS_SIZE + /*padding*/ 8)
    // Call `artLockObjectFromCode()`.
    mv    a0, t0                     // Pass the object to lock.
    mv    a1, xSELF                  // Pass Thread::Current().
    call  artLockObjectFromCode      // (Object* obj, Thread*)
    // Restore return address.
    RESTORE_GPR ra, (ALL_ARGS_SIZE + /*padding*/ 8)
    // Check result.
    bnez   a0, 1f
    // Restore register args a0-a7, fa0-fa7 and return.
    RESTORE_ALL_ARGS_DECREASE_FRAME /*padding*/ 8 + /*RA*/ 8
    ret
    // The `ret` above ended the success path; re-establish the CFA offset for
    // the failure path at label 1, where the spill area is still allocated.
    .cfi_adjust_cfa_offset (ALL_ARGS_SIZE + /*padding*/ 8 + /*RA*/ 8)
1:
    // All args are irrelevant when throwing an exception. Remove the spill area.
    DECREASE_FRAME (ALL_ARGS_SIZE + /*padding*/ 8 + /*RA*/ 8)
    // Make a tail call to `artDeliverPendingExceptionFromCode()`.
    // Rely on the JNI transition frame constructed in the JNI stub.
    mv     a0, xSELF                           // Pass Thread::Current().
    tail   artDeliverPendingExceptionFromCode  // (Thread*)
END art_jni_lock_object_no_inline
530
    /*
     * Entry from JNI stub that tries to unlock the object in a fast path and calls
     * `artJniUnlockObject()` for the difficult cases. Note that failure to unlock
     * is fatal, so we do not need to check for exceptions in the slow path.
     * Custom calling convention:
     *     T0 holds the non-null object to unlock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     Return registers a0 and fa0 need to be preserved.
     */
ENTRY art_jni_unlock_object
    // Inline fast path; the macro transfers control to the slow-path entry
    // point `art_jni_unlock_object_no_inline` (below) when the fast path fails.
    UNLOCK_OBJECT_FAST_PATH t0, art_jni_unlock_object_no_inline, /*can_be_null*/ 0
END art_jni_unlock_object
543
    /*
     * Entry from JNI stub that calls `artJniUnlockObject()`. Note that failure to
     * unlock is fatal, so we do not need to check for exceptions.
     * Custom calling convention:
     *     T0 holds the non-null object to unlock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     Return registers a0 and fa0 need to be preserved.
     * Expands to a trampoline calling `artJniUnlockObject(obj, Thread*)` with
     * the object from T0 in a0 and Thread::Current() in a1.
     */
    // This is also the slow path for art_jni_unlock_object.
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_unlock_object_no_inline, artJniUnlockObject, t0, xSELF
554