1 /*
2  * Copyright (C) 2011 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef ART_RUNTIME_ART_METHOD_H_
18 #define ART_RUNTIME_ART_METHOD_H_
19 
20 #include <cstddef>
21 #include <limits>
22 
23 #include <android-base/logging.h>
24 #include <jni.h>
25 
26 #include "base/array_ref.h"
27 #include "base/bit_utils.h"
28 #include "base/casts.h"
29 #include "base/logging.h"
30 #include "base/macros.h"
31 #include "base/pointer_size.h"
32 #include "base/runtime_debug.h"
33 #include "dex/dex_file_structs.h"
34 #include "dex/modifiers.h"
35 #include "dex/primitive.h"
36 #include "interpreter/mterp/nterp.h"
37 #include "gc_root.h"
38 #include "obj_ptr.h"
39 #include "offsets.h"
40 #include "read_barrier_option.h"
41 
42 namespace art HIDDEN {
43 
44 class CodeItemDataAccessor;
45 class CodeItemDebugInfoAccessor;
46 class CodeItemInstructionAccessor;
47 class DexFile;
48 template<class T> class Handle;
49 class ImtConflictTable;
50 enum InvokeType : uint32_t;
51 union JValue;
52 template<typename T> class LengthPrefixedArray;
53 class OatQuickMethodHeader;
54 class ProfilingInfo;
55 class ScopedObjectAccessAlreadyRunnable;
56 class ShadowFrame;
57 class Signature;
58 
59 namespace mirror {
60 class Array;
61 class Class;
62 class ClassLoader;
63 class DexCache;
64 class IfTable;
65 class Object;
66 template <typename MirrorType> class ObjectArray;
67 class PointerArray;
68 class String;
69 }  // namespace mirror
70 
71 namespace detail {
72 template <char Shorty> struct ShortyTraits;
73 template <> struct ShortyTraits<'V'>;
74 template <> struct ShortyTraits<'Z'>;
75 template <> struct ShortyTraits<'B'>;
76 template <> struct ShortyTraits<'C'>;
77 template <> struct ShortyTraits<'S'>;
78 template <> struct ShortyTraits<'I'>;
79 template <> struct ShortyTraits<'J'>;
80 template <> struct ShortyTraits<'F'>;
81 template <> struct ShortyTraits<'D'>;
82 template <> struct ShortyTraits<'L'>;
83 template <char Shorty> struct HandleShortyTraits;
84 template <> struct HandleShortyTraits<'L'>;
85 }  // namespace detail
86 
87 class EXPORT ArtMethod final {
88  public:
89   // Should the class state be checked on sensitive operations?
90   DECLARE_RUNTIME_DEBUG_FLAG(kCheckDeclaringClassState);
91 
  // The dex_method_index of a runtime method is kDexNoIndex. To avoid pulling in
  // dex file headers here, we duplicate the value as this constexpr and verify
  // that it is correct in art_method.cc.
94   static constexpr uint32_t kRuntimeMethodDexMethodIndex = 0xFFFFFFFF;
95 
96   ArtMethod() : access_flags_(0), dex_method_index_(0),
97       method_index_(0), hotness_count_(0) { }
98 
99   ArtMethod(ArtMethod* src, PointerSize image_pointer_size) {
100     CopyFrom(src, image_pointer_size);
101   }
102 
103   static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
104                                         jobject jlr_method)
105       REQUIRES_SHARED(Locks::mutator_lock_);
106 
107   // Visit the declaring class in 'method' if it is within [start_boundary, end_boundary).
108   template<typename RootVisitorType>
109   static void VisitRoots(RootVisitorType& visitor,
110                          uint8_t* start_boundary,
111                          uint8_t* end_boundary,
112                          ArtMethod* method)
113       REQUIRES_SHARED(Locks::mutator_lock_);
114 
115   // Visit declaring classes of all the art-methods in 'array' that reside
116   // in [start_boundary, end_boundary).
117   template<PointerSize kPointerSize, typename RootVisitorType>
118   static void VisitArrayRoots(RootVisitorType& visitor,
119                               uint8_t* start_boundary,
120                               uint8_t* end_boundary,
121                               LengthPrefixedArray<ArtMethod>* array)
122       REQUIRES_SHARED(Locks::mutator_lock_);
123 
124   template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
125   ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClass() REQUIRES_SHARED(Locks::mutator_lock_);
126 
127   template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
128   ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClassUnchecked()
129       REQUIRES_SHARED(Locks::mutator_lock_);
130 
131   mirror::CompressedReference<mirror::Object>* GetDeclaringClassAddressWithoutBarrier() {
132     return declaring_class_.AddressWithoutBarrier();
133   }
134 
135   void SetDeclaringClass(ObjPtr<mirror::Class> new_declaring_class)
136       REQUIRES_SHARED(Locks::mutator_lock_);
137 
138   bool CASDeclaringClass(ObjPtr<mirror::Class> expected_class, ObjPtr<mirror::Class> desired_class)
139       REQUIRES_SHARED(Locks::mutator_lock_);
140 
141   static constexpr MemberOffset DeclaringClassOffset() {
142     return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
143   }
144 
145   uint32_t GetAccessFlags() const {
146     return access_flags_.load(std::memory_order_relaxed);
147   }
148 
  // This version should only be called when it's certain there is no
  // concurrency, so there is no need to guarantee atomicity. For example,
  // before the method is linked.
152   void SetAccessFlags(uint32_t new_access_flags) REQUIRES_SHARED(Locks::mutator_lock_) {
153     // The following check ensures that we do not set `Intrinsics::kNone` (see b/228049006).
154     DCHECK_IMPLIES((new_access_flags & kAccIntrinsic) != 0,
155                    (new_access_flags & kAccIntrinsicBits) != 0);
156     access_flags_.store(new_access_flags, std::memory_order_relaxed);
157   }
158 
159   static constexpr MemberOffset AccessFlagsOffset() {
160     return MemberOffset(OFFSETOF_MEMBER(ArtMethod, access_flags_));
161   }
162 
163   // Approximate what kind of method call would be used for this method.
164   InvokeType GetInvokeType() REQUIRES_SHARED(Locks::mutator_lock_);
165 
166   // Returns true if the method is declared public.
167   bool IsPublic() const {
168     return IsPublic(GetAccessFlags());
169   }
170 
171   static bool IsPublic(uint32_t access_flags) {
172     return (access_flags & kAccPublic) != 0;
173   }
174 
175   // Returns true if the method is declared private.
176   bool IsPrivate() const {
177     return IsPrivate(GetAccessFlags());
178   }
179 
180   static bool IsPrivate(uint32_t access_flags) {
181     return (access_flags & kAccPrivate) != 0;
182   }
183 
184   // Returns true if the method is declared static.
185   bool IsStatic() const {
186     return IsStatic(GetAccessFlags());
187   }
188 
189   static bool IsStatic(uint32_t access_flags) {
190     return (access_flags & kAccStatic) != 0;
191   }
192 
193   // Returns true if the method is a constructor according to access flags.
194   bool IsConstructor() const {
195     return IsConstructor(GetAccessFlags());
196   }
197 
198   static bool IsConstructor(uint32_t access_flags) {
199     return (access_flags & kAccConstructor) != 0;
200   }
201 
202   // Returns true if the method is a class initializer according to access flags.
203   bool IsClassInitializer() const {
204     return IsClassInitializer(GetAccessFlags());
205   }
206 
207   static bool IsClassInitializer(uint32_t access_flags) {
208     return IsConstructor(access_flags) && IsStatic(access_flags);
209   }
210 
211   // Returns true if the method is static, private, or a constructor.
212   bool IsDirect() const {
213     return IsDirect(GetAccessFlags());
214   }
215 
216   static bool IsDirect(uint32_t access_flags) {
217     constexpr uint32_t direct = kAccStatic | kAccPrivate | kAccConstructor;
218     return (access_flags & direct) != 0;
219   }
220 
221   // Returns true if the method is declared synchronized.
222   bool IsSynchronized() const {
223     return IsSynchronized(GetAccessFlags());
224   }
225 
226   static bool IsSynchronized(uint32_t access_flags) {
    constexpr uint32_t synchronized_flags = kAccSynchronized | kAccDeclaredSynchronized;
    return (access_flags & synchronized_flags) != 0;
229   }
230 
231   // Returns true if the method is declared final.
232   bool IsFinal() const {
233     return IsFinal(GetAccessFlags());
234   }
235 
236   static bool IsFinal(uint32_t access_flags) {
237     return (access_flags & kAccFinal) != 0;
238   }
239 
240   // Returns true if the method is an intrinsic.
241   bool IsIntrinsic() const {
242     return IsIntrinsic(GetAccessFlags());
243   }
244 
245   static bool IsIntrinsic(uint32_t access_flags) {
246     return (access_flags & kAccIntrinsic) != 0;
247   }
248 
249   ALWAYS_INLINE void SetIntrinsic(uint32_t intrinsic) REQUIRES_SHARED(Locks::mutator_lock_);
250 
251   uint32_t GetIntrinsic() const {
252     static const int kAccFlagsShift = CTZ(kAccIntrinsicBits);
253     static_assert(IsPowerOfTwo((kAccIntrinsicBits >> kAccFlagsShift) + 1),
254                   "kAccIntrinsicBits are not continuous");
255     static_assert((kAccIntrinsic & kAccIntrinsicBits) == 0,
256                   "kAccIntrinsic overlaps kAccIntrinsicBits");
257     DCHECK(IsIntrinsic());
258     return (GetAccessFlags() & kAccIntrinsicBits) >> kAccFlagsShift;
259   }
260 
261   void SetNotIntrinsic() REQUIRES_SHARED(Locks::mutator_lock_);
262 
263   // Returns true if the method is a copied method.
264   bool IsCopied() const {
265     return IsCopied(GetAccessFlags());
266   }
267 
268   static bool IsCopied(uint32_t access_flags) {
269     // We do not have intrinsics for any default methods and therefore intrinsics are never copied.
270     // So we are using a flag from the intrinsic flags range and need to check `kAccIntrinsic` too.
271     static_assert((kAccCopied & kAccIntrinsicBits) != 0,
272                   "kAccCopied deliberately overlaps intrinsic bits");
273     const bool copied = (access_flags & (kAccIntrinsic | kAccCopied)) == kAccCopied;
274     // (IsMiranda() || IsDefaultConflicting()) implies copied
275     DCHECK(!(IsMiranda(access_flags) || IsDefaultConflicting(access_flags)) || copied)
276         << "Miranda or default-conflict methods must always be copied.";
277     return copied;
278   }
279 
280   bool IsMiranda() const {
281     return IsMiranda(GetAccessFlags());
282   }
283 
284   static bool IsMiranda(uint32_t access_flags) {
    // Miranda methods are marked as copied and abstract but not default.
    // We need to check kAccIntrinsic too; see `IsCopied()`.
287     static constexpr uint32_t kMask = kAccIntrinsic | kAccCopied | kAccAbstract | kAccDefault;
288     static constexpr uint32_t kValue = kAccCopied | kAccAbstract;
289     return (access_flags & kMask) == kValue;
290   }
291 
  // A default conflict method is a special sentinel method that stands for a
  // conflict between multiple default methods. It cannot be invoked; attempting
  // to do so throws an IncompatibleClassChangeError.
295   bool IsDefaultConflicting() const {
296     return IsDefaultConflicting(GetAccessFlags());
297   }
298 
299   static bool IsDefaultConflicting(uint32_t access_flags) {
    // Default conflict methods are marked as copied, abstract and default.
    // We need to check kAccIntrinsic too; see `IsCopied()`.
302     static constexpr uint32_t kMask = kAccIntrinsic | kAccCopied | kAccAbstract | kAccDefault;
303     static constexpr uint32_t kValue = kAccCopied | kAccAbstract | kAccDefault;
304     return (access_flags & kMask) == kValue;
305   }
306 
307   // Returns true if invoking this method will not throw an AbstractMethodError or
308   // IncompatibleClassChangeError.
309   bool IsInvokable() const {
310     return IsInvokable(GetAccessFlags());
311   }
312 
313   static bool IsInvokable(uint32_t access_flags) {
314     // Default conflicting methods are marked with `kAccAbstract` (as well as `kAccCopied`
315     // and `kAccDefault`) but they are not considered abstract, see `IsAbstract()`.
316     DCHECK_EQ((access_flags & kAccAbstract) == 0,
317               !IsDefaultConflicting(access_flags) && !IsAbstract(access_flags));
318     return (access_flags & kAccAbstract) == 0;
319   }
320 
321   // Returns true if the method is marked as pre-compiled.
322   bool IsPreCompiled() const {
323     return IsPreCompiled(GetAccessFlags());
324   }
325 
326   static bool IsPreCompiled(uint32_t access_flags) {
327     // kAccCompileDontBother and kAccPreCompiled overlap with kAccIntrinsicBits.
328     static_assert((kAccCompileDontBother & kAccIntrinsicBits) != 0);
329     static_assert((kAccPreCompiled & kAccIntrinsicBits) != 0);
330     static constexpr uint32_t kMask = kAccIntrinsic | kAccCompileDontBother | kAccPreCompiled;
331     static constexpr uint32_t kValue = kAccCompileDontBother | kAccPreCompiled;
332     return (access_flags & kMask) == kValue;
333   }
334 
335   void SetPreCompiled() REQUIRES_SHARED(Locks::mutator_lock_) {
336     DCHECK(IsInvokable());
337     DCHECK(IsCompilable());
    // kAccPreCompiled and kAccCompileDontBother overlap with kAccIntrinsicBits.
339     // We don't mark the intrinsics as precompiled, which means in JIT zygote
340     // mode, compiled code for intrinsics will not be shared, and apps will
341     // compile intrinsics themselves if needed.
342     if (IsIntrinsic()) {
343       return;
344     }
345     AddAccessFlags(kAccPreCompiled | kAccCompileDontBother);
346   }
347 
348   void ClearPreCompiled() REQUIRES_SHARED(Locks::mutator_lock_) {
349     ClearAccessFlags(kAccPreCompiled | kAccCompileDontBother);
350   }
351 
352   // Returns true if the method resides in shared memory.
353   bool IsMemorySharedMethod() {
354     return IsMemorySharedMethod(GetAccessFlags());
355   }
356 
357   static bool IsMemorySharedMethod(uint32_t access_flags) {
358     return (access_flags & kAccMemorySharedMethod) != 0;
359   }
360 
361   void SetMemorySharedMethod() REQUIRES_SHARED(Locks::mutator_lock_) {
362     DCHECK(!IsIntrinsic());
363     DCHECK(!IsAbstract());
364     AddAccessFlags(kAccMemorySharedMethod);
365   }
366 
367   static uint32_t SetMemorySharedMethod(uint32_t access_flags) {
368     DCHECK(!IsIntrinsic(access_flags));
369     DCHECK(!IsAbstract(access_flags));
370     return access_flags | kAccMemorySharedMethod;
371   }
372 
373   void ClearMemorySharedMethod() REQUIRES_SHARED(Locks::mutator_lock_) {
374     uint32_t access_flags = GetAccessFlags();
375     if (IsIntrinsic(access_flags) || IsAbstract(access_flags)) {
376       return;
377     }
378     if (IsMemorySharedMethod(access_flags)) {
379       ClearAccessFlags(kAccMemorySharedMethod);
380     }
381   }
382 
383   // Returns true if the method can be compiled.
384   bool IsCompilable() const {
385     return IsCompilable(GetAccessFlags());
386   }
387 
388   static bool IsCompilable(uint32_t access_flags) {
389     if (IsIntrinsic(access_flags)) {
390       // kAccCompileDontBother overlaps with kAccIntrinsicBits.
391       return true;
392     }
393     if (IsPreCompiled(access_flags)) {
394       return true;
395     }
396     return (access_flags & kAccCompileDontBother) == 0;
397   }
398 
399   void ClearDontCompile() REQUIRES_SHARED(Locks::mutator_lock_) {
400     DCHECK(!IsMiranda());
401     ClearAccessFlags(kAccCompileDontBother);
402   }
403 
404   void SetDontCompile() REQUIRES_SHARED(Locks::mutator_lock_) {
405     DCHECK(!IsMiranda());
406     AddAccessFlags(kAccCompileDontBother);
407   }
408 
409   // This is set by the class linker.
410   bool IsDefault() const {
411     return IsDefault(GetAccessFlags());
412   }
413 
414   static bool IsDefault(uint32_t access_flags) {
415     static_assert((kAccDefault & (kAccIntrinsic | kAccIntrinsicBits)) == 0,
416                   "kAccDefault conflicts with intrinsic modifier");
417     return (access_flags & kAccDefault) != 0;
418   }
419 
420   // Returns true if the method is obsolete.
421   bool IsObsolete() const {
422     return IsObsolete(GetAccessFlags());
423   }
424 
425   static bool IsObsolete(uint32_t access_flags) {
426     return (access_flags & kAccObsoleteMethod) != 0;
427   }
428 
429   void SetIsObsolete() REQUIRES_SHARED(Locks::mutator_lock_) {
430     AddAccessFlags(kAccObsoleteMethod);
431   }
432 
433   // Returns true if the method is native.
434   bool IsNative() const {
435     return IsNative(GetAccessFlags());
436   }
437 
438   static bool IsNative(uint32_t access_flags) {
439     return (access_flags & kAccNative) != 0;
440   }
441 
442   // Checks to see if the method was annotated with @dalvik.annotation.optimization.FastNative.
443   bool IsFastNative() const {
444     return IsFastNative(GetAccessFlags());
445   }
446 
447   static bool IsFastNative(uint32_t access_flags) {
448     // The presence of the annotation is checked by ClassLinker and recorded in access flags.
449     // The kAccFastNative flag value is used with a different meaning for non-native methods,
450     // so we need to check the kAccNative flag as well.
451     constexpr uint32_t mask = kAccFastNative | kAccNative;
452     return (access_flags & mask) == mask;
453   }
454 
455   // Checks to see if the method was annotated with @dalvik.annotation.optimization.CriticalNative.
456   bool IsCriticalNative() const {
457     return IsCriticalNative(GetAccessFlags());
458   }
459 
460   static bool IsCriticalNative(uint32_t access_flags) {
461     // The presence of the annotation is checked by ClassLinker and recorded in access flags.
462     // The kAccCriticalNative flag value is used with a different meaning for non-native methods,
463     // so we need to check the kAccNative flag as well.
464     constexpr uint32_t mask = kAccCriticalNative | kAccNative;
465     return (access_flags & mask) == mask;
466   }
467 
468   // Returns true if the method is managed (not native).
469   bool IsManaged() const {
470     return IsManaged(GetAccessFlags());
471   }
472 
473   static bool IsManaged(uint32_t access_flags) {
474     return !IsNative(access_flags);
475   }
476 
477   // Returns true if the method is managed (not native) and invokable.
478   bool IsManagedAndInvokable() const {
479     return IsManagedAndInvokable(GetAccessFlags());
480   }
481 
482   static bool IsManagedAndInvokable(uint32_t access_flags) {
483     return IsManaged(access_flags) && IsInvokable(access_flags);
484   }
485 
486   // Returns true if the method is abstract.
487   bool IsAbstract() const {
488     return IsAbstract(GetAccessFlags());
489   }
490 
491   static bool IsAbstract(uint32_t access_flags) {
    // Default conflicting methods have `kAccAbstract` set but they are not actually abstract.
493     return (access_flags & kAccAbstract) != 0 && !IsDefaultConflicting(access_flags);
494   }
495 
496   // Returns true if the method is declared synthetic.
497   bool IsSynthetic() const {
498     return IsSynthetic(GetAccessFlags());
499   }
500 
501   static bool IsSynthetic(uint32_t access_flags) {
502     return (access_flags & kAccSynthetic) != 0;
503   }
504 
505   // Returns true if the method is declared varargs.
506   bool IsVarargs() const {
507     return IsVarargs(GetAccessFlags());
508   }
509 
510   static bool IsVarargs(uint32_t access_flags) {
511     return (access_flags & kAccVarargs) != 0;
512   }
513 
514   bool IsProxyMethod() REQUIRES_SHARED(Locks::mutator_lock_);
515 
516   bool IsSignaturePolymorphic() REQUIRES_SHARED(Locks::mutator_lock_);
517 
518   bool SkipAccessChecks() const {
519     // The kAccSkipAccessChecks flag value is used with a different meaning for native methods,
520     // so we need to check the kAccNative flag as well.
521     return (GetAccessFlags() & (kAccSkipAccessChecks | kAccNative)) == kAccSkipAccessChecks;
522   }
523 
524   void SetSkipAccessChecks() REQUIRES_SHARED(Locks::mutator_lock_) {
525     // SkipAccessChecks() is applicable only to non-native methods.
526     DCHECK(!IsNative());
527     AddAccessFlags(kAccSkipAccessChecks);
528   }
529   void ClearSkipAccessChecks() REQUIRES_SHARED(Locks::mutator_lock_) {
530     // SkipAccessChecks() is applicable only to non-native methods.
531     DCHECK(!IsNative());
532     ClearAccessFlags(kAccSkipAccessChecks);
533   }
534 
535   // Returns true if the method has previously been warm.
536   bool PreviouslyWarm() const {
537     return PreviouslyWarm(GetAccessFlags());
538   }
539 
540   static bool PreviouslyWarm(uint32_t access_flags) {
541     // kAccPreviouslyWarm overlaps with kAccIntrinsicBits. Return true for intrinsics.
542     constexpr uint32_t mask = kAccPreviouslyWarm | kAccIntrinsic;
543     return (access_flags & mask) != 0u;
544   }
545 
546   void SetPreviouslyWarm() REQUIRES_SHARED(Locks::mutator_lock_) {
547     if (IsIntrinsic()) {
548       // kAccPreviouslyWarm overlaps with kAccIntrinsicBits.
549       return;
550     }
551     AddAccessFlags(kAccPreviouslyWarm);
552   }
553 
554   // Should this method be run in the interpreter and count locks (e.g., failed structured-
555   // locking verification)?
556   bool MustCountLocks() const {
557     return MustCountLocks(GetAccessFlags());
558   }
559 
560   static bool MustCountLocks(uint32_t access_flags) {
561     if (IsIntrinsic(access_flags)) {
562       return false;
563     }
564     return (access_flags & kAccMustCountLocks) != 0;
565   }
566 
567   void ClearMustCountLocks() REQUIRES_SHARED(Locks::mutator_lock_) {
568     ClearAccessFlags(kAccMustCountLocks);
569   }
570 
571   void SetMustCountLocks() REQUIRES_SHARED(Locks::mutator_lock_) {
572     ClearAccessFlags(kAccSkipAccessChecks);
573     AddAccessFlags(kAccMustCountLocks);
574   }
575 
576   // Returns true if the method is using the nterp entrypoint fast path.
577   bool HasNterpEntryPointFastPathFlag() const {
578     return HasNterpEntryPointFastPathFlag(GetAccessFlags());
579   }
580 
581   static bool HasNterpEntryPointFastPathFlag(uint32_t access_flags) {
582     constexpr uint32_t mask = kAccNative | kAccNterpEntryPointFastPathFlag;
583     return (access_flags & mask) == kAccNterpEntryPointFastPathFlag;
584   }
585 
586   void SetNterpEntryPointFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
587     DCHECK(!IsNative());
588     AddAccessFlags(kAccNterpEntryPointFastPathFlag);
589   }
590 
591   void ClearNterpEntryPointFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
592     DCHECK(!IsNative());
593     ClearAccessFlags(kAccNterpEntryPointFastPathFlag);
594   }
595 
596   void SetNterpInvokeFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
597     AddAccessFlags(kAccNterpInvokeFastPathFlag);
598   }
599 
600   void ClearNterpInvokeFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
601     ClearAccessFlags(kAccNterpInvokeFastPathFlag);
602   }
603 
604   static uint32_t ClearNterpFastPathFlags(uint32_t access_flags) {
605     // `kAccNterpEntryPointFastPathFlag` has a different use for native methods.
606     if (!IsNative(access_flags)) {
607       access_flags &= ~kAccNterpEntryPointFastPathFlag;
608     }
609     access_flags &= ~kAccNterpInvokeFastPathFlag;
610     return access_flags;
611   }
612 
613   // Returns whether the method is a string constructor. The method must not
614   // be a class initializer. (Class initializers are called from a different
615   // context where we do not need to check for string constructors.)
616   bool IsStringConstructor() REQUIRES_SHARED(Locks::mutator_lock_);
617 
618   // Returns true if this method could be overridden by a default method.
619   bool IsOverridableByDefaultMethod() REQUIRES_SHARED(Locks::mutator_lock_);
620 
621   bool CheckIncompatibleClassChange(InvokeType type) REQUIRES_SHARED(Locks::mutator_lock_);
622 
623   // Throws the error that would result from trying to invoke this method (i.e.
624   // IncompatibleClassChangeError, AbstractMethodError, or IllegalAccessError).
  // Only call if !IsInvokable().
626   void ThrowInvocationTimeError(ObjPtr<mirror::Object> receiver)
627       REQUIRES_SHARED(Locks::mutator_lock_);
628 
629   uint16_t GetMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);
630 
631   // Doesn't do erroneous / unresolved class checks.
632   uint16_t GetMethodIndexDuringLinking() REQUIRES_SHARED(Locks::mutator_lock_);
633 
634   size_t GetVtableIndex() REQUIRES_SHARED(Locks::mutator_lock_) {
635     return GetMethodIndex();
636   }
637 
638   void SetMethodIndex(uint16_t new_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
639     // Not called within a transaction.
640     method_index_ = new_method_index;
641   }
642 
643   static constexpr MemberOffset DexMethodIndexOffset() {
644     return MemberOffset(OFFSETOF_MEMBER(ArtMethod, dex_method_index_));
645   }
646 
647   static constexpr MemberOffset MethodIndexOffset() {
648     return MemberOffset(OFFSETOF_MEMBER(ArtMethod, method_index_));
649   }
650 
651   static constexpr MemberOffset ImtIndexOffset() {
652     return MemberOffset(OFFSETOF_MEMBER(ArtMethod, imt_index_));
653   }
654 
  // Returns the number of 32-bit registers required to hold all the arguments.
656   static size_t NumArgRegisters(std::string_view shorty);
657 
658   ALWAYS_INLINE uint32_t GetDexMethodIndex() const {
659     return dex_method_index_;
660   }
661 
662   void SetDexMethodIndex(uint32_t new_idx) REQUIRES_SHARED(Locks::mutator_lock_) {
663     // Not called within a transaction.
664     dex_method_index_ = new_idx;
665   }
666 
667   // Lookup the Class from the type index into this method's dex cache.
668   ObjPtr<mirror::Class> LookupResolvedClassFromTypeIndex(dex::TypeIndex type_idx)
669       REQUIRES_SHARED(Locks::mutator_lock_);
670   // Resolve the Class from the type index into this method's dex cache.
671   ObjPtr<mirror::Class> ResolveClassFromTypeIndex(dex::TypeIndex type_idx)
672       REQUIRES_SHARED(Locks::mutator_lock_);
673 
674   // Returns true if this method has the same name and signature of the other method.
675   bool HasSameNameAndSignature(ArtMethod* other) REQUIRES_SHARED(Locks::mutator_lock_);
676 
677   // Find the method that this method overrides.
678   ArtMethod* FindOverriddenMethod(PointerSize pointer_size)
679       REQUIRES_SHARED(Locks::mutator_lock_);
680 
681   // Find the method index for this method within other_dexfile. If this method isn't present then
682   // return dex::kDexNoIndex. The name_and_signature_idx MUST refer to a MethodId with the same
683   // name and signature in the other_dexfile, such as the method index used to resolve this method
684   // in the other_dexfile.
685   uint32_t FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
686                                             uint32_t name_and_signature_idx)
687       REQUIRES_SHARED(Locks::mutator_lock_);
688 
689   void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
690       REQUIRES_SHARED(Locks::mutator_lock_);
691 
692   template <char ReturnType, char... ArgType>
693   typename detail::ShortyTraits<ReturnType>::Type
694   InvokeStatic(Thread* self, typename detail::ShortyTraits<ArgType>::Type... args)
695       REQUIRES_SHARED(Locks::mutator_lock_);
696 
697   template <char ReturnType, char... ArgType>
698   typename detail::ShortyTraits<ReturnType>::Type
699   InvokeInstance(Thread* self,
700                  ObjPtr<mirror::Object> receiver,
701                  typename detail::ShortyTraits<ArgType>::Type... args)
702       REQUIRES_SHARED(Locks::mutator_lock_);
703 
704   template <char ReturnType, char... ArgType>
705   typename detail::ShortyTraits<ReturnType>::Type
706   InvokeFinal(Thread* self,
707               ObjPtr<mirror::Object> receiver,
708               typename detail::ShortyTraits<ArgType>::Type... args)
709       REQUIRES_SHARED(Locks::mutator_lock_);
710 
711   template <char ReturnType, char... ArgType>
712   typename detail::ShortyTraits<ReturnType>::Type
713   InvokeVirtual(Thread* self,
714                 ObjPtr<mirror::Object> receiver,
715                 typename detail::ShortyTraits<ArgType>::Type... args)
716       REQUIRES_SHARED(Locks::mutator_lock_);
717 
718   template <char ReturnType, char... ArgType>
719   typename detail::ShortyTraits<ReturnType>::Type
720   InvokeInterface(Thread* self,
721                   ObjPtr<mirror::Object> receiver,
722                   typename detail::ShortyTraits<ArgType>::Type... args)
723       REQUIRES_SHARED(Locks::mutator_lock_);
724 
725   template <char... ArgType, typename HandleScopeType>
726   Handle<mirror::Object> NewObject(HandleScopeType& hs,
727                                    Thread* self,
728                                    typename detail::HandleShortyTraits<ArgType>::Type... args)
729       REQUIRES_SHARED(Locks::mutator_lock_);
730 
731   template <char... ArgType>
732   ObjPtr<mirror::Object> NewObject(Thread* self,
733                                    typename detail::HandleShortyTraits<ArgType>::Type... args)
734       REQUIRES_SHARED(Locks::mutator_lock_);
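
  // Illustrative use of the shorty-templated invoke API above. This is a sketch:
  // `method`, `ctor`, `receiver` and the argument values are hypothetical, and
  // the caller must hold the mutator lock. For a static method with signature
  // "int max(int, int)" (shorty "III"):
  //   int32_t result = method->InvokeStatic<'I', 'I', 'I'>(self, 3, 7);
  // For a virtual call with a void return and no arguments (shorty "V"):
  //   method->InvokeVirtual<'V'>(self, receiver);
  // And for allocating and constructing an object via a no-arg constructor:
  //   ObjPtr<mirror::Object> obj = ctor->NewObject<>(self);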
735 
736   // Returns true if the method needs a class initialization check according to access flags.
737   // Only static methods other than the class initializer need this check.
738   // The caller is responsible for performing the actual check.
739   bool NeedsClinitCheckBeforeCall() const {
740     return NeedsClinitCheckBeforeCall(GetAccessFlags());
741   }
742 
743   static bool NeedsClinitCheckBeforeCall(uint32_t access_flags) {
744     // The class initializer is special as it is invoked during initialization
745     // and does not need the check.
746     return IsStatic(access_flags) && !IsConstructor(access_flags);
747   }
748 
749   // Check if the method needs a class initialization check before call
750   // and its declaring class is not yet visibly initialized.
751   // (The class needs to be visibly initialized before we can use entrypoints
752   // to compiled code for static methods. See b/18161648 .)
753   template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
754   bool StillNeedsClinitCheck() REQUIRES_SHARED(Locks::mutator_lock_);
755 
756   // Similar to `StillNeedsClinitCheck()` but the method's declaring class may
757   // be dead but not yet reclaimed by the GC, so we cannot do a full read barrier
758   // but we still want to check the class status in the to-space class if any.
759   // Note: JIT can hold and use such methods during managed heap GC.
760   bool StillNeedsClinitCheckMayBeDead() REQUIRES_SHARED(Locks::mutator_lock_);
761 
762   // Check if the declaring class has been verified and look at the to-space
763   // class object, if any, as in `StillNeedsClinitCheckMayBeDead()`.
764   bool IsDeclaringClassVerifiedMayBeDead() REQUIRES_SHARED(Locks::mutator_lock_);
765 
766   const void* GetEntryPointFromQuickCompiledCode() const {
767     return GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
768   }
769   ALWAYS_INLINE
770   const void* GetEntryPointFromQuickCompiledCodePtrSize(PointerSize pointer_size) const {
771     return GetNativePointer<const void*>(
772         EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
773   }
774 
775   void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code)
776       REQUIRES_SHARED(Locks::mutator_lock_) {
777     SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
778                                               kRuntimePointerSize);
779   }
780   ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
781       const void* entry_point_from_quick_compiled_code, PointerSize pointer_size)
782       REQUIRES_SHARED(Locks::mutator_lock_);
783 
784   static constexpr MemberOffset DataOffset(PointerSize pointer_size) {
785     return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
786         PtrSizedFields, data_) / sizeof(void*) * static_cast<size_t>(pointer_size));
787   }
788 
789   static constexpr MemberOffset EntryPointFromJniOffset(PointerSize pointer_size) {
790     return DataOffset(pointer_size);
791   }
792 
793   static constexpr MemberOffset EntryPointFromQuickCompiledCodeOffset(PointerSize pointer_size) {
794     return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
795         PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*)
796             * static_cast<size_t>(pointer_size));
797   }
798 
799   ImtConflictTable* GetImtConflictTable(PointerSize pointer_size) const {
800     DCHECK(IsRuntimeMethod());
801     return reinterpret_cast<ImtConflictTable*>(GetDataPtrSize(pointer_size));
802   }
803 
804   ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, PointerSize pointer_size)
805       REQUIRES_SHARED(Locks::mutator_lock_) {
806     DCHECK(IsRuntimeMethod());
807     SetDataPtrSize(table, pointer_size);
808   }
809 
810   ALWAYS_INLINE bool HasSingleImplementation() REQUIRES_SHARED(Locks::mutator_lock_);
811 
812   ALWAYS_INLINE void SetHasSingleImplementation(bool single_impl)
813       REQUIRES_SHARED(Locks::mutator_lock_) {
814     DCHECK(!IsIntrinsic()) << "conflict with intrinsic bits";
815     if (single_impl) {
816       AddAccessFlags(kAccSingleImplementation);
817     } else {
818       ClearAccessFlags(kAccSingleImplementation);
819     }
820   }
821 
822   ALWAYS_INLINE bool HasSingleImplementationFlag() const {
823     return (GetAccessFlags() & kAccSingleImplementation) != 0;
824   }
825 
826   static uint32_t SetHasSingleImplementation(uint32_t access_flags, bool single_impl) {
827     DCHECK(!IsIntrinsic(access_flags)) << "conflict with intrinsic bits";
828     if (single_impl) {
829       return access_flags | kAccSingleImplementation;
830     } else {
831       return access_flags & ~kAccSingleImplementation;
832     }
833   }
834 
835   // Takes a method and returns a 'canonical' one if the method is default (and therefore
836   // potentially copied from some other class). For example, this ensures that the debugger does not
837   // get confused as to which method we are in.
838   ArtMethod* GetCanonicalMethod(PointerSize pointer_size = kRuntimePointerSize)
839       REQUIRES_SHARED(Locks::mutator_lock_);
840 
841   ArtMethod* GetSingleImplementation(PointerSize pointer_size);
842 
843   ALWAYS_INLINE void SetSingleImplementation(ArtMethod* method, PointerSize pointer_size)
844       REQUIRES_SHARED(Locks::mutator_lock_) {
845     DCHECK(!IsNative());
846     // Non-abstract method's single implementation is just itself.
847     DCHECK(IsAbstract());
848     DCHECK(method == nullptr || method->IsInvokable());
849     SetDataPtrSize(method, pointer_size);
850   }
851 
852   void* GetEntryPointFromJni() const {
853     DCHECK(IsNative());
854     return GetEntryPointFromJniPtrSize(kRuntimePointerSize);
855   }
856 
857   ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(PointerSize pointer_size) const {
858     return GetDataPtrSize(pointer_size);
859   }
860 
861   void SetEntryPointFromJni(const void* entrypoint)
862       REQUIRES_SHARED(Locks::mutator_lock_) {
863     // The resolution method also has a JNI entrypoint for direct calls from
864     // compiled code to the JNI dlsym lookup stub for @CriticalNative.
865     DCHECK(IsNative() || IsRuntimeMethod());
866     SetEntryPointFromJniPtrSize(entrypoint, kRuntimePointerSize);
867   }
868 
869   ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, PointerSize pointer_size)
870       REQUIRES_SHARED(Locks::mutator_lock_) {
871     SetDataPtrSize(entrypoint, pointer_size);
872   }
873 
874   ALWAYS_INLINE void* GetDataPtrSize(PointerSize pointer_size) const {
875     DCHECK(IsImagePointerSize(pointer_size));
876     return GetNativePointer<void*>(DataOffset(pointer_size), pointer_size);
877   }
878 
879   ALWAYS_INLINE void SetDataPtrSize(const void* data, PointerSize pointer_size)
880       REQUIRES_SHARED(Locks::mutator_lock_) {
881     DCHECK(IsImagePointerSize(pointer_size));
882     SetNativePointer(DataOffset(pointer_size), data, pointer_size);
883   }
884 
  // Returns true if this is a CalleeSaveMethod or a ResolutionMethod; such methods do not
  // adhere to the normal conventions of managed code methods. Returns false for proxy methods.
887   ALWAYS_INLINE bool IsRuntimeMethod() const {
888     return dex_method_index_ == kRuntimeMethodDexMethodIndex;
889   }
890 
891   bool HasCodeItem() REQUIRES_SHARED(Locks::mutator_lock_) {
892     uint32_t access_flags = GetAccessFlags();
893     return !IsNative(access_flags) &&
894            !IsAbstract(access_flags) &&
895            !IsDefaultConflicting(access_flags) &&
896            !IsRuntimeMethod() &&
897            !IsProxyMethod();
898   }
899 
900   // We need to explicitly indicate whether the code item is obtained from the compact dex file,
901   // because in JVMTI, we obtain the code item from the standard dex file to update the method.
902   void SetCodeItem(const dex::CodeItem* code_item, bool is_compact_dex_code_item)
903       REQUIRES_SHARED(Locks::mutator_lock_);
904 
  // Is this a hand-crafted method used for something like describing callee saves?
906   bool IsCalleeSaveMethod() REQUIRES_SHARED(Locks::mutator_lock_);
907 
908   bool IsResolutionMethod() REQUIRES_SHARED(Locks::mutator_lock_);
909 
910   bool IsImtUnimplementedMethod() REQUIRES_SHARED(Locks::mutator_lock_);
911 
  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // *has_no_move_exception indicates whether the handler lacks a move-exception instruction,
  // in which case the found catch block is responsible for clearing the exception.
915   uint32_t FindCatchBlock(Handle<mirror::Class> exception_type, uint32_t dex_pc,
916                           bool* has_no_move_exception)
917       REQUIRES_SHARED(Locks::mutator_lock_);
918 
919   // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
920   template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
921            bool kVisitProxyMethod = true,
922            typename RootVisitorType>
923   void VisitRoots(RootVisitorType& visitor, PointerSize pointer_size) NO_THREAD_SAFETY_ANALYSIS;
924 
925   const DexFile* GetDexFile() REQUIRES_SHARED(Locks::mutator_lock_);
926 
927   const char* GetDeclaringClassDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);
928   std::string_view GetDeclaringClassDescriptorView() REQUIRES_SHARED(Locks::mutator_lock_);
929 
930   ALWAYS_INLINE const char* GetShorty() REQUIRES_SHARED(Locks::mutator_lock_);
931 
932   const char* GetShorty(uint32_t* out_length) REQUIRES_SHARED(Locks::mutator_lock_);
933 
934   std::string_view GetShortyView() REQUIRES_SHARED(Locks::mutator_lock_);
935 
936   const Signature GetSignature() REQUIRES_SHARED(Locks::mutator_lock_);
937 
938   ALWAYS_INLINE const char* GetName() REQUIRES_SHARED(Locks::mutator_lock_);
939 
940   ALWAYS_INLINE std::string_view GetNameView() REQUIRES_SHARED(Locks::mutator_lock_);
941 
942   ObjPtr<mirror::String> ResolveNameString() REQUIRES_SHARED(Locks::mutator_lock_);
943 
944   bool NameEquals(ObjPtr<mirror::String> name) REQUIRES_SHARED(Locks::mutator_lock_);
945 
946   const dex::CodeItem* GetCodeItem() REQUIRES_SHARED(Locks::mutator_lock_);
947 
948   int32_t GetLineNumFromDexPC(uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_);
949 
950   const dex::ProtoId& GetPrototype() REQUIRES_SHARED(Locks::mutator_lock_);
951 
952   const dex::TypeList* GetParameterTypeList() REQUIRES_SHARED(Locks::mutator_lock_);
953 
954   const char* GetDeclaringClassSourceFile() REQUIRES_SHARED(Locks::mutator_lock_);
955 
956   uint16_t GetClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);
957 
958   const dex::ClassDef& GetClassDef() REQUIRES_SHARED(Locks::mutator_lock_);
959 
960   ALWAYS_INLINE size_t GetNumberOfParameters() REQUIRES_SHARED(Locks::mutator_lock_);
961 
962   const char* GetReturnTypeDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);
963   std::string_view GetReturnTypeDescriptorView() REQUIRES_SHARED(Locks::mutator_lock_);
964 
965   ALWAYS_INLINE Primitive::Type GetReturnTypePrimitive() REQUIRES_SHARED(Locks::mutator_lock_);
966 
967   const char* GetTypeDescriptorFromTypeIdx(dex::TypeIndex type_idx)
968       REQUIRES_SHARED(Locks::mutator_lock_);
969 
970   // Lookup return type.
971   ObjPtr<mirror::Class> LookupResolvedReturnType() REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve return type. May cause thread suspension due to GetClassFromTypeIdx
  // calling ResolveType; this has caused a large number of bugs at call sites.
974   ObjPtr<mirror::Class> ResolveReturnType() REQUIRES_SHARED(Locks::mutator_lock_);
975 
976   ObjPtr<mirror::ClassLoader> GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);
977 
978   template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
979   ObjPtr<mirror::DexCache> GetDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
980   template <ReadBarrierOption kReadBarrierOption>
981   ObjPtr<mirror::DexCache> GetObsoleteDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
982 
983   ALWAYS_INLINE ArtMethod* GetInterfaceMethodForProxyUnchecked(PointerSize pointer_size)
984       REQUIRES_SHARED(Locks::mutator_lock_);
985   ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy(PointerSize pointer_size)
986       REQUIRES_SHARED(Locks::mutator_lock_);
987 
988   ArtMethod* GetNonObsoleteMethod() REQUIRES_SHARED(Locks::mutator_lock_);
989 
990   // May cause thread suspension due to class resolution.
991   bool EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params)
992       REQUIRES_SHARED(Locks::mutator_lock_);
993 
994   // Size of an instance of this native class.
995   static constexpr size_t Size(PointerSize pointer_size) {
996     return PtrSizedFieldsOffset(pointer_size) +
997         (sizeof(PtrSizedFields) / sizeof(void*)) * static_cast<size_t>(pointer_size);
998   }
999 
1000   // Alignment of an instance of this native class.
1001   static constexpr size_t Alignment(PointerSize pointer_size) {
1002     // The ArtMethod alignment is the same as image pointer size. This differs from
1003     // alignof(ArtMethod) if cross-compiling with pointer_size != sizeof(void*).
1004     return static_cast<size_t>(pointer_size);
1005   }
1006 
1007   void CopyFrom(ArtMethod* src, PointerSize image_pointer_size)
1008       REQUIRES_SHARED(Locks::mutator_lock_);
1009 
1010   ALWAYS_INLINE void ResetCounter(uint16_t new_value);
1011   ALWAYS_INLINE void UpdateCounter(int32_t new_samples);
1012   ALWAYS_INLINE void SetHotCounter();
1013   ALWAYS_INLINE bool CounterIsHot();
1014   ALWAYS_INLINE uint16_t GetCounter();
1015   ALWAYS_INLINE bool CounterHasChanged(uint16_t threshold);
1016 
1017   ALWAYS_INLINE static constexpr uint16_t MaxCounter() {
1018     return std::numeric_limits<decltype(hotness_count_)>::max();
1019   }
1020 
1021   ALWAYS_INLINE uint32_t GetImtIndex() REQUIRES_SHARED(Locks::mutator_lock_);
1022 
1023   void CalculateAndSetImtIndex() REQUIRES_SHARED(Locks::mutator_lock_);
1024 
1025   static constexpr MemberOffset HotnessCountOffset() {
1026     return MemberOffset(OFFSETOF_MEMBER(ArtMethod, hotness_count_));
1027   }
1028 
1029   // Returns the method header for the compiled code containing 'pc'. Note that runtime
1030   // methods will return null for this method, as they are not oat based.
1031   const OatQuickMethodHeader* GetOatQuickMethodHeader(uintptr_t pc)
1032       REQUIRES_SHARED(Locks::mutator_lock_);
1033 
1034   // Get compiled code for the method, return null if no code exists.
1035   const void* GetOatMethodQuickCode(PointerSize pointer_size)
1036       REQUIRES_SHARED(Locks::mutator_lock_);
1037 
1038   // Returns a human-readable signature for 'm'. Something like "a.b.C.m" or
1039   // "a.b.C.m(II)V" (depending on the value of 'with_signature').
1040   static std::string PrettyMethod(ArtMethod* m, bool with_signature = true)
1041       REQUIRES_SHARED(Locks::mutator_lock_);
1042   std::string PrettyMethod(bool with_signature = true)
1043       REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the short JNI native function name for this method (used for non-overloaded methods).
1045   std::string JniShortName()
1046       REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the long JNI native function name for this method, including the mangled
  // argument signature (used for overloaded methods).
1048   std::string JniLongName()
1049       REQUIRES_SHARED(Locks::mutator_lock_);
1050 
1051   // Update entry points by passing them through the visitor.
1052   template <typename Visitor>
1053   ALWAYS_INLINE void UpdateEntrypoints(const Visitor& visitor, PointerSize pointer_size)
1054       REQUIRES_SHARED(Locks::mutator_lock_);
1055 
1056   // Visit the individual members of an ArtMethod.  Used by imgdiag.
1057   // As imgdiag does not support mixing instruction sets or pointer sizes (e.g., using imgdiag32
1058   // to inspect 64-bit images, etc.), we can go beneath the accessors directly to the class members.
1059   template <typename VisitorFunc>
1060   void VisitMembers(VisitorFunc& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
1061     DCHECK(IsImagePointerSize(kRuntimePointerSize));
1062     visitor(this, &declaring_class_, "declaring_class_");
1063     visitor(this, &access_flags_, "access_flags_");
1064     visitor(this, &dex_method_index_, "dex_method_index_");
1065     visitor(this, &method_index_, "method_index_");
1066     visitor(this, &hotness_count_, "hotness_count_");
1067     visitor(this, &ptr_sized_fields_.data_, "ptr_sized_fields_.data_");
1068     visitor(this,
1069             &ptr_sized_fields_.entry_point_from_quick_compiled_code_,
1070             "ptr_sized_fields_.entry_point_from_quick_compiled_code_");
1071   }
1072 
1073   // Returns the dex instructions of the code item for the art method. Returns an empty array for
1074   // the null code item case.
1075   ALWAYS_INLINE CodeItemInstructionAccessor DexInstructions()
1076       REQUIRES_SHARED(Locks::mutator_lock_);
1077 
1078   // Returns the dex code item data section of the DexFile for the art method.
1079   ALWAYS_INLINE CodeItemDataAccessor DexInstructionData()
1080       REQUIRES_SHARED(Locks::mutator_lock_);
1081 
1082   // Returns the dex code item debug info section of the DexFile for the art method.
1083   ALWAYS_INLINE CodeItemDebugInfoAccessor DexInstructionDebugInfo()
1084       REQUIRES_SHARED(Locks::mutator_lock_);
1085 
1086   GcRoot<mirror::Class>& DeclaringClassRoot() {
1087     return declaring_class_;
1088   }
1089 
1090  protected:
1091   // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
1092   // The class we are a part of.
1093   GcRoot<mirror::Class> declaring_class_;
1094 
  // Access flags; the low 16 bits are defined by the dex specification.
  // Getting and setting these flags must be atomic whenever concurrency is
  // possible, e.g. after this method's class has been linked, such as when
  // setting the verifier flags or the single-implementation flag.
1099   std::atomic<std::uint32_t> access_flags_;
1100 
1101   /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */
1102 
1103   // Index into method_ids of the dex file associated with this method.
1104   uint32_t dex_method_index_;
1105 
1106   /* End of dex file fields. */
1107 
  // Entry within a dispatch table for this method. For static/direct methods the index is into
  // the declaringClass.directMethods, for virtual methods the vtable, and for interface methods
  // the interface's method array in the `IfTable`s of implementing classes.
1111   uint16_t method_index_;
1112 
1113   union {
1114     // Non-abstract methods: The hotness we measure for this method. Not atomic,
1115     // as we allow missing increments: if the method is hot, we will see it eventually.
1116     uint16_t hotness_count_;
1117     // Abstract methods: IMT index.
1118     uint16_t imt_index_;
1119   };
1120 
1121   // Fake padding field gets inserted here.
1122 
1123   // Must be the last fields in the method.
1124   struct PtrSizedFields {
1125     // Depending on the method type, the data is
1126     //   - native method: pointer to the JNI function registered to this method
1127     //                    or a function to resolve the JNI function,
1128     //   - resolution method: pointer to a function to resolve the method and
1129     //                        the JNI function for @CriticalNative.
1130     //   - conflict method: ImtConflictTable,
1131     //   - abstract/interface method: the single-implementation if any,
1132     //   - proxy method: the original interface method or constructor,
1133     //   - default conflict method: null
1134     //   - other methods: during AOT the code item offset, at runtime a pointer
1135     //                    to the code item.
1136     void* data_;
1137 
1138     // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
1139     // the interpreter.
1140     void* entry_point_from_quick_compiled_code_;
1141   } ptr_sized_fields_;
1142 
1143  private:
1144   uint16_t FindObsoleteDexClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);
1145 
1146   static constexpr size_t PtrSizedFieldsOffset(PointerSize pointer_size) {
1147     // Round up to pointer size for padding field. Tested in art_method.cc.
1148     return RoundUp(offsetof(ArtMethod, hotness_count_) + sizeof(hotness_count_),
1149                    static_cast<size_t>(pointer_size));
1150   }
1151 
1152   // Compare given pointer size to the image pointer size.
1153   static bool IsImagePointerSize(PointerSize pointer_size);
1154 
1155   dex::TypeIndex GetReturnTypeIndex() REQUIRES_SHARED(Locks::mutator_lock_);
1156 
1157   template<typename T>
1158   ALWAYS_INLINE T GetNativePointer(MemberOffset offset, PointerSize pointer_size) const {
1159     static_assert(std::is_pointer<T>::value, "T must be a pointer type");
1160     const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
1161     if (pointer_size == PointerSize::k32) {
1162       return reinterpret_cast<T>(*reinterpret_cast<const uint32_t*>(addr));
1163     } else {
1164       auto v = *reinterpret_cast<const uint64_t*>(addr);
1165       return reinterpret_cast<T>(dchecked_integral_cast<uintptr_t>(v));
1166     }
1167   }
1168 
1169   template<typename T>
1170   ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, PointerSize pointer_size)
1171       REQUIRES_SHARED(Locks::mutator_lock_) {
1172     static_assert(std::is_pointer<T>::value, "T must be a pointer type");
1173     const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
1174     if (pointer_size == PointerSize::k32) {
1175       uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
1176       *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
1177     } else {
1178       *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
1179     }
1180   }
1181 
1182   static inline bool IsValidIntrinsicUpdate(uint32_t modifier) {
1183     return (((modifier & kAccIntrinsic) == kAccIntrinsic) &&
1184             ((modifier & ~(kAccIntrinsic | kAccIntrinsicBits)) == 0) &&
1185             ((modifier & kAccIntrinsicBits) != 0));  // b/228049006: ensure intrinsic is not `kNone`
1186   }
1187 
1188   static inline bool OverlapsIntrinsicBits(uint32_t modifier) {
1189     return (modifier & kAccIntrinsicBits) != 0;
1190   }
1191 
1192   // This setter guarantees atomicity.
1193   void AddAccessFlags(uint32_t flag) REQUIRES_SHARED(Locks::mutator_lock_) {
1194     DCHECK_IMPLIES(IsIntrinsic(), !OverlapsIntrinsicBits(flag) || IsValidIntrinsicUpdate(flag));
    // None of the readers rely on ordering.
1196     access_flags_.fetch_or(flag, std::memory_order_relaxed);
1197   }
1198 
1199   // This setter guarantees atomicity.
1200   void ClearAccessFlags(uint32_t flag) REQUIRES_SHARED(Locks::mutator_lock_) {
1201     DCHECK_IMPLIES(IsIntrinsic(), !OverlapsIntrinsicBits(flag) || IsValidIntrinsicUpdate(flag));
1202     access_flags_.fetch_and(~flag, std::memory_order_relaxed);
1203   }
1204 
1205   // Helper method for checking the class status of a possibly dead declaring class.
  // See `StillNeedsClinitCheckMayBeDead()` and `IsDeclaringClassVerifiedMayBeDead()`.
1207   ObjPtr<mirror::Class> GetDeclaringClassMayBeDead() REQUIRES_SHARED(Locks::mutator_lock_);
1208 
1209   // Used by GetName and GetNameView to share common code.
1210   const char* GetRuntimeMethodName() REQUIRES_SHARED(Locks::mutator_lock_);
1211 
1212   friend class RuntimeImageHelper;  // For SetNativePointer.
1213 
1214   DISALLOW_COPY_AND_ASSIGN(ArtMethod);  // Need to use CopyFrom to deal with 32 vs 64 bits.
1215 };
1216 
1217 class MethodCallback {
1218  public:
1219   virtual ~MethodCallback() {}
1220 
1221   virtual void RegisterNativeMethod(ArtMethod* method,
1222                                     const void* original_implementation,
1223                                     /*out*/void** new_implementation)
1224       REQUIRES_SHARED(Locks::mutator_lock_) = 0;
1225 };
1226 
1227 }  // namespace art
1228 
1229 #endif  // ART_RUNTIME_ART_METHOD_H_
1230