/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ART_METHOD_H_
#define ART_RUNTIME_ART_METHOD_H_

#include <cstddef>
#include <limits>

#include <android-base/logging.h>
#include <jni.h>

#include "base/array_ref.h"
#include "base/bit_utils.h"
#include "base/casts.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/pointer_size.h"
#include "base/runtime_debug.h"
#include "dex/dex_file_structs.h"
#include "dex/modifiers.h"
#include "dex/primitive.h"
#include "interpreter/mterp/nterp.h"
#include "gc_root.h"
#include "intrinsics_enum.h"
#include "obj_ptr.h"
#include "offsets.h"
#include "read_barrier_option.h"

namespace art HIDDEN {

class CodeItemDataAccessor;
class CodeItemDebugInfoAccessor;
class CodeItemInstructionAccessor;
class DexFile;
template<class T> class Handle;
class ImtConflictTable;
enum InvokeType : uint32_t;
union JValue;
template<typename T> class LengthPrefixedArray;
class OatQuickMethodHeader;
class ProfilingInfo;
class ScopedObjectAccessAlreadyRunnable;
class ShadowFrame;
class Signature;

namespace mirror {
class Array;
class Class;
class ClassLoader;
class DexCache;
class IfTable;
class Object;
template <typename MirrorType> class ObjectArray;
class PointerArray;
class String;
}  // namespace mirror

namespace detail {
template <char Shorty> struct ShortyTraits;
template <> struct ShortyTraits<'V'>;
template <> struct ShortyTraits<'Z'>;
template <> struct ShortyTraits<'B'>;
template <> struct ShortyTraits<'C'>;
template <> struct ShortyTraits<'S'>;
template <> struct ShortyTraits<'I'>;
template <> struct ShortyTraits<'J'>;
template <> struct ShortyTraits<'F'>;
template <> struct ShortyTraits<'D'>;
template <> struct ShortyTraits<'L'>;
template <char Shorty> struct HandleShortyTraits;
template <> struct HandleShortyTraits<'L'>;
}  // namespace detail
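
// Note on the `detail` traits above: the ShortyTraits specializations map dex
// shorty characters to native types for the typed Invoke* helpers declared in
// ArtMethod below; presumably 'V' -> void, 'Z' -> bool, 'I' -> int32_t,
// 'J' -> int64_t, 'F' -> float, 'D' -> double, and 'L' -> an object reference.
// This summary is an editorial aid and is not verified against the trait
// definitions, which live elsewhere in ART.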

class EXPORT ArtMethod final {
 public:
  // Should the class state be checked on sensitive operations?
  DECLARE_RUNTIME_DEBUG_FLAG(kCheckDeclaringClassState);

  // The runtime dex_method_index is kDexNoIndex. To lower dependencies, we use this
  // constexpr, and ensure that the value is correct in art_method.cc.
  static constexpr uint32_t kRuntimeMethodDexMethodIndex = 0xFFFFFFFF;

  ArtMethod() : access_flags_(0), dex_method_index_(0),
      method_index_(0), hotness_count_(0) { }

  ArtMethod(ArtMethod* src, PointerSize image_pointer_size) {
    CopyFrom(src, image_pointer_size);
  }

  static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                        jobject jlr_method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Visit the declaring class in 'method' if it is within [start_boundary, end_boundary).
  template<typename RootVisitorType>
  static void VisitRoots(RootVisitorType& visitor,
                         uint8_t* start_boundary,
                         uint8_t* end_boundary,
                         ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Visit declaring classes of all the art-methods in 'array' that reside
  // in [start_boundary, end_boundary).
  template<PointerSize kPointerSize, typename RootVisitorType>
  static void VisitArrayRoots(RootVisitorType& visitor,
                              uint8_t* start_boundary,
                              uint8_t* end_boundary,
                              LengthPrefixedArray<ArtMethod>* array)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClass() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClassUnchecked()
      REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::CompressedReference<mirror::Object>* GetDeclaringClassAddressWithoutBarrier() {
    return declaring_class_.AddressWithoutBarrier();
  }

  void SetDeclaringClass(ObjPtr<mirror::Class> new_declaring_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool CASDeclaringClass(ObjPtr<mirror::Class> expected_class, ObjPtr<mirror::Class> desired_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }

  uint32_t GetAccessFlags() const {
    return access_flags_.load(std::memory_order_relaxed);
  }

  // This version should only be called when it's certain there is no
  // concurrency so there is no need to guarantee atomicity. For example,
  // before the method is linked.
  void SetAccessFlags(uint32_t new_access_flags) REQUIRES_SHARED(Locks::mutator_lock_) {
    // The following check ensures that we do not set `Intrinsics::kNone` (see b/228049006).
    DCHECK_IMPLIES((new_access_flags & kAccIntrinsic) != 0,
                   (new_access_flags & kAccIntrinsicBits) != 0);
    access_flags_.store(new_access_flags, std::memory_order_relaxed);
  }

  static constexpr MemberOffset AccessFlagsOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, access_flags_));
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if the method is declared public.
  bool IsPublic() const {
    return IsPublic(GetAccessFlags());
  }

  static bool IsPublic(uint32_t access_flags) {
    return (access_flags & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() const {
    return IsPrivate(GetAccessFlags());
  }

  static bool IsPrivate(uint32_t access_flags) {
    return (access_flags & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() const {
    return IsStatic(GetAccessFlags());
  }

  static bool IsStatic(uint32_t access_flags) {
    return (access_flags & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor according to access flags.
  bool IsConstructor() const {
    return IsConstructor(GetAccessFlags());
  }

  static bool IsConstructor(uint32_t access_flags) {
    return (access_flags & kAccConstructor) != 0;
  }

  // Returns true if the method is a class initializer according to access flags.
  bool IsClassInitializer() const {
    return IsClassInitializer(GetAccessFlags());
  }

  static bool IsClassInitializer(uint32_t access_flags) {
    return IsConstructor(access_flags) && IsStatic(access_flags);
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() const {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    constexpr uint32_t direct = kAccStatic | kAccPrivate | kAccConstructor;
    return (access_flags & direct) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() const {
    return IsSynchronized(GetAccessFlags());
  }

  static bool IsSynchronized(uint32_t access_flags) {
    constexpr uint32_t synchronized_mask = kAccSynchronized | kAccDeclaredSynchronized;
    return (access_flags & synchronized_mask) != 0;
  }

  // Returns true if the method is declared final.
  bool IsFinal() const {
    return IsFinal(GetAccessFlags());
  }

  static bool IsFinal(uint32_t access_flags) {
    return (access_flags & kAccFinal) != 0;
  }

  // Returns true if the method is an intrinsic.
  bool IsIntrinsic() const {
    return IsIntrinsic(GetAccessFlags());
  }

  static bool IsIntrinsic(uint32_t access_flags) {
    return (access_flags & kAccIntrinsic) != 0;
  }

  ALWAYS_INLINE void SetIntrinsic(Intrinsics intrinsic) REQUIRES_SHARED(Locks::mutator_lock_);

  Intrinsics GetIntrinsic() const {
    static const int kAccFlagsShift = CTZ(kAccIntrinsicBits);
    static_assert(IsPowerOfTwo((kAccIntrinsicBits >> kAccFlagsShift) + 1),
                  "kAccIntrinsicBits are not continuous");
    static_assert((kAccIntrinsic & kAccIntrinsicBits) == 0,
                  "kAccIntrinsic overlaps kAccIntrinsicBits");
    DCHECK(IsIntrinsic());
    return static_cast<Intrinsics>((GetAccessFlags() & kAccIntrinsicBits) >> kAccFlagsShift);
  }

  void SetNotIntrinsic() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if the method is a copied method.
  bool IsCopied() const {
    return IsCopied(GetAccessFlags());
  }

  static bool IsCopied(uint32_t access_flags) {
    // We do not have intrinsics for any default methods and therefore intrinsics are never copied.
    // So we are using a flag from the intrinsic flags range and need to check `kAccIntrinsic` too.
    static_assert((kAccCopied & kAccIntrinsicBits) != 0,
                  "kAccCopied deliberately overlaps intrinsic bits");
    const bool copied = (access_flags & (kAccIntrinsic | kAccCopied)) == kAccCopied;
    // (IsMiranda() || IsDefaultConflicting()) implies copied
    DCHECK(!(IsMiranda(access_flags) || IsDefaultConflicting(access_flags)) || copied)
        << "Miranda or default-conflict methods must always be copied.";
    return copied;
  }

  bool IsMiranda() const {
    return IsMiranda(GetAccessFlags());
  }

  static bool IsMiranda(uint32_t access_flags) {
    // Miranda methods are marked as copied and abstract but not default.
    // We need to check kAccIntrinsic too, see `IsCopied()`.
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCopied | kAccAbstract | kAccDefault;
    static constexpr uint32_t kValue = kAccCopied | kAccAbstract;
    return (access_flags & kMask) == kValue;
  }

  // A default conflict method is a special sentinel method that stands for a conflict between
  // multiple default methods. It cannot be invoked, throwing an IncompatibleClassChangeError
  // if one attempts to do so.
  bool IsDefaultConflicting() const {
    return IsDefaultConflicting(GetAccessFlags());
  }

  static bool IsDefaultConflicting(uint32_t access_flags) {
    // Default conflict methods are marked as copied, abstract, and default.
    // We need to check kAccIntrinsic too, see `IsCopied()`.
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCopied | kAccAbstract | kAccDefault;
    static constexpr uint32_t kValue = kAccCopied | kAccAbstract | kAccDefault;
    return (access_flags & kMask) == kValue;
  }
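
  // A summary of how the checks above decode the copied-method variants
  // (derived from the masks and values in `IsCopied()`, `IsMiranda()` and
  // `IsDefaultConflicting()`; shown as bit values of the named flags):
  //
  //                        kAccIntrinsic  kAccCopied  kAccAbstract  kAccDefault
  //   copied (any kind)          0            1           any           any
  //   Miranda                    0            1            1             0
  //   default conflicting        0            1            1             1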

  // Returns true if invoking this method will not throw an AbstractMethodError or
  // IncompatibleClassChangeError.
  bool IsInvokable() const {
    return IsInvokable(GetAccessFlags());
  }

  static bool IsInvokable(uint32_t access_flags) {
    // Default conflicting methods are marked with `kAccAbstract` (as well as `kAccCopied`
    // and `kAccDefault`) but they are not considered abstract, see `IsAbstract()`.
    DCHECK_EQ((access_flags & kAccAbstract) == 0,
              !IsDefaultConflicting(access_flags) && !IsAbstract(access_flags));
    return (access_flags & kAccAbstract) == 0;
  }

  // Returns true if the method is marked as pre-compiled.
  bool IsPreCompiled() const {
    return IsPreCompiled(GetAccessFlags());
  }

  static bool IsPreCompiled(uint32_t access_flags) {
    // kAccCompileDontBother and kAccPreCompiled overlap with kAccIntrinsicBits.
    static_assert((kAccCompileDontBother & kAccIntrinsicBits) != 0);
    static_assert((kAccPreCompiled & kAccIntrinsicBits) != 0);
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCompileDontBother | kAccPreCompiled;
    static constexpr uint32_t kValue = kAccCompileDontBother | kAccPreCompiled;
    return (access_flags & kMask) == kValue;
  }

  void SetPreCompiled() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsInvokable());
    DCHECK(IsCompilable());
    // kAccPreCompiled and kAccCompileDontBother overlap with kAccIntrinsicBits.
    // We don't mark the intrinsics as precompiled, which means in JIT zygote
    // mode, compiled code for intrinsics will not be shared, and apps will
    // compile intrinsics themselves if needed.
    if (IsIntrinsic()) {
      return;
    }
    AddAccessFlags(kAccPreCompiled | kAccCompileDontBother);
  }

  void ClearPreCompiled() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccPreCompiled | kAccCompileDontBother);
  }

  // Returns true if the method resides in shared memory.
  bool IsMemorySharedMethod() {
    return IsMemorySharedMethod(GetAccessFlags());
  }

  static bool IsMemorySharedMethod(uint32_t access_flags) {
    // There's an overlap with `kAccMemorySharedMethod` and `kAccIntrinsicBits` but that's OK as
    // intrinsics are always in the boot image and therefore memory shared.
    static_assert((kAccMemorySharedMethod & kAccIntrinsicBits) != 0,
                  "kAccMemorySharedMethod deliberately overlaps intrinsic bits");
    if (IsIntrinsic(access_flags)) {
      return true;
    }

    return (access_flags & kAccMemorySharedMethod) != 0;
  }

  void SetMemorySharedMethod() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsIntrinsic());
    DCHECK(!IsAbstract());
    AddAccessFlags(kAccMemorySharedMethod);
  }

  static uint32_t SetMemorySharedMethod(uint32_t access_flags) {
    DCHECK(!IsIntrinsic(access_flags));
    DCHECK(!IsAbstract(access_flags));
    return access_flags | kAccMemorySharedMethod;
  }

  void ClearMemorySharedMethod() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint32_t access_flags = GetAccessFlags();
    if (IsIntrinsic(access_flags) || IsAbstract(access_flags)) {
      return;
    }
    if (IsMemorySharedMethod(access_flags)) {
      ClearAccessFlags(kAccMemorySharedMethod);
    }
  }

  // Returns true if the method can be compiled.
  bool IsCompilable() const {
    return IsCompilable(GetAccessFlags());
  }

  static bool IsCompilable(uint32_t access_flags) {
    if (IsIntrinsic(access_flags)) {
      // kAccCompileDontBother overlaps with kAccIntrinsicBits.
      return true;
    }
    if (IsPreCompiled(access_flags)) {
      return true;
    }
    return (access_flags & kAccCompileDontBother) == 0;
  }

  void ClearDontCompile() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsMiranda());
    ClearAccessFlags(kAccCompileDontBother);
  }

  void SetDontCompile() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsMiranda());
    AddAccessFlags(kAccCompileDontBother);
  }

  // This is set by the class linker.
  bool IsDefault() const {
    return IsDefault(GetAccessFlags());
  }

  static bool IsDefault(uint32_t access_flags) {
    // The intrinsic bits use `kAccDefault`. However, we don't generate intrinsics for default
    // methods. Therefore, we check that both `kAccDefault` is set and `kAccIntrinsic` unset.
    static_assert((kAccDefault & kAccIntrinsicBits) != 0,
                  "kAccDefault deliberately overlaps intrinsic bits");
    static constexpr uint32_t kMask = kAccIntrinsic | kAccDefault;
    static constexpr uint32_t kValue = kAccDefault;
    return (access_flags & kMask) == kValue;
  }

  // Returns true if the method is obsolete.
  bool IsObsolete() const {
    return IsObsolete(GetAccessFlags());
  }

  static bool IsObsolete(uint32_t access_flags) {
    return (access_flags & kAccObsoleteMethod) != 0;
  }

  void SetIsObsolete() REQUIRES_SHARED(Locks::mutator_lock_) {
    AddAccessFlags(kAccObsoleteMethod);
  }

  // Returns true if the method is native.
  bool IsNative() const {
    return IsNative(GetAccessFlags());
  }

  static bool IsNative(uint32_t access_flags) {
    return (access_flags & kAccNative) != 0;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.FastNative.
  bool IsFastNative() const {
    return IsFastNative(GetAccessFlags());
  }

  static bool IsFastNative(uint32_t access_flags) {
    // The presence of the annotation is checked by ClassLinker and recorded in access flags.
    // The kAccFastNative flag value is used with a different meaning for non-native methods,
    // so we need to check the kAccNative flag as well.
    constexpr uint32_t mask = kAccFastNative | kAccNative;
    return (access_flags & mask) == mask;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.CriticalNative.
  bool IsCriticalNative() const {
    return IsCriticalNative(GetAccessFlags());
  }

  static bool IsCriticalNative(uint32_t access_flags) {
    // The presence of the annotation is checked by ClassLinker and recorded in access flags.
    // The kAccCriticalNative flag value is used with a different meaning for non-native methods,
    // so we need to check the kAccNative flag as well.
    constexpr uint32_t mask = kAccCriticalNative | kAccNative;
    return (access_flags & mask) == mask;
  }

  // Returns true if the method is managed (not native).
  bool IsManaged() const {
    return IsManaged(GetAccessFlags());
  }

  static bool IsManaged(uint32_t access_flags) {
    return !IsNative(access_flags);
  }

  // Returns true if the method is managed (not native) and invokable.
  bool IsManagedAndInvokable() const {
    return IsManagedAndInvokable(GetAccessFlags());
  }

  static bool IsManagedAndInvokable(uint32_t access_flags) {
    return IsManaged(access_flags) && IsInvokable(access_flags);
  }

  // Returns true if the method is abstract.
  bool IsAbstract() const {
    return IsAbstract(GetAccessFlags());
  }

  static bool IsAbstract(uint32_t access_flags) {
    // Default conflicting methods have `kAccAbstract` set but they are not actually abstract.
    return (access_flags & kAccAbstract) != 0 && !IsDefaultConflicting(access_flags);
  }

  // Returns true if the method is declared synthetic.
  bool IsSynthetic() const {
    return IsSynthetic(GetAccessFlags());
  }

  static bool IsSynthetic(uint32_t access_flags) {
    return (access_flags & kAccSynthetic) != 0;
  }

  // Returns true if the method is declared varargs.
  bool IsVarargs() const {
    return IsVarargs(GetAccessFlags());
  }

  static bool IsVarargs(uint32_t access_flags) {
    return (access_flags & kAccVarargs) != 0;
  }

  bool IsProxyMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsSignaturePolymorphic() REQUIRES_SHARED(Locks::mutator_lock_);

  bool SkipAccessChecks() const {
    // The kAccSkipAccessChecks flag value is used with a different meaning for native methods,
    // so we need to check the kAccNative flag as well.
    return (GetAccessFlags() & (kAccSkipAccessChecks | kAccNative)) == kAccSkipAccessChecks;
  }

  void SetSkipAccessChecks() REQUIRES_SHARED(Locks::mutator_lock_) {
    // SkipAccessChecks() is applicable only to non-native methods.
    DCHECK(!IsNative());
    AddAccessFlags(kAccSkipAccessChecks);
  }
  void ClearSkipAccessChecks() REQUIRES_SHARED(Locks::mutator_lock_) {
    // SkipAccessChecks() is applicable only to non-native methods.
    DCHECK(!IsNative());
    ClearAccessFlags(kAccSkipAccessChecks);
  }

  // Returns true if the method has previously been warm.
  bool PreviouslyWarm() const {
    return PreviouslyWarm(GetAccessFlags());
  }

  static bool PreviouslyWarm(uint32_t access_flags) {
    // kAccPreviouslyWarm overlaps with kAccIntrinsicBits. Return true for intrinsics.
    constexpr uint32_t mask = kAccPreviouslyWarm | kAccIntrinsic;
    return (access_flags & mask) != 0u;
  }

  void SetPreviouslyWarm() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (IsIntrinsic()) {
      // kAccPreviouslyWarm overlaps with kAccIntrinsicBits.
      return;
    }
    AddAccessFlags(kAccPreviouslyWarm);
  }

  // Should this method be run in the interpreter and count locks (e.g., failed structured-
  // locking verification)?
  bool MustCountLocks() const {
    return MustCountLocks(GetAccessFlags());
  }

  static bool MustCountLocks(uint32_t access_flags) {
    if (IsIntrinsic(access_flags)) {
      return false;
    }
    return (access_flags & kAccMustCountLocks) != 0;
  }

  void ClearMustCountLocks() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccMustCountLocks);
  }

  void SetMustCountLocks() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccSkipAccessChecks);
    AddAccessFlags(kAccMustCountLocks);
  }

  // Returns true if the method is using the nterp entrypoint fast path.
  bool HasNterpEntryPointFastPathFlag() const {
    return HasNterpEntryPointFastPathFlag(GetAccessFlags());
  }

  static bool HasNterpEntryPointFastPathFlag(uint32_t access_flags) {
    constexpr uint32_t mask = kAccNative | kAccNterpEntryPointFastPathFlag;
    return (access_flags & mask) == kAccNterpEntryPointFastPathFlag;
  }

  void SetNterpEntryPointFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNative());
    AddAccessFlags(kAccNterpEntryPointFastPathFlag);
  }

  void ClearNterpEntryPointFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNative());
    ClearAccessFlags(kAccNterpEntryPointFastPathFlag);
  }

  void SetNterpInvokeFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    AddAccessFlags(kAccNterpInvokeFastPathFlag);
  }

  void ClearNterpInvokeFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccNterpInvokeFastPathFlag);
  }

  static uint32_t ClearNterpFastPathFlags(uint32_t access_flags) {
    // `kAccNterpEntryPointFastPathFlag` has a different use for native methods.
    if (!IsNative(access_flags)) {
      access_flags &= ~kAccNterpEntryPointFastPathFlag;
    }
    access_flags &= ~kAccNterpInvokeFastPathFlag;
    return access_flags;
  }

  // Returns whether the method is a string constructor. The method must not
  // be a class initializer. (Class initializers are called from a different
  // context where we do not need to check for string constructors.)
  bool IsStringConstructor() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if this method could be overridden by a default method.
  bool IsOverridableByDefaultMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool CheckIncompatibleClassChange(InvokeType type) REQUIRES_SHARED(Locks::mutator_lock_);

  // Throws the error that would result from trying to invoke this method (i.e.
  // IncompatibleClassChangeError, AbstractMethodError, or IllegalAccessError).
  // Only call if !IsInvokable().
  void ThrowInvocationTimeError(ObjPtr<mirror::Object> receiver)
      REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  // Doesn't do erroneous / unresolved class checks.
  uint16_t GetMethodIndexDuringLinking() REQUIRES_SHARED(Locks::mutator_lock_);

  size_t GetVtableIndex() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Not called within a transaction.
    method_index_ = new_method_index;
  }

  static constexpr MemberOffset DexMethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, dex_method_index_));
  }

  static constexpr MemberOffset MethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, method_index_));
  }

  static constexpr MemberOffset ImtIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, imt_index_));
  }

  // Number of 32-bit registers that would be required to hold all the arguments.
  static size_t NumArgRegisters(std::string_view shorty);
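  // A worked example for NumArgRegisters(), assuming the usual dex shorty
  // convention (the leading return-type character is not counted; 'J' and 'D'
  // each need two 32-bit registers, every other argument type needs one):
  //
  //   NumArgRegisters("VIJ");  // void f(int, long) -> 1 + 2 = 3 registers.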

  ALWAYS_INLINE uint32_t GetDexMethodIndex() const {
    return dex_method_index_;
  }

  void SetDexMethodIndex(uint32_t new_idx) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Not called within a transaction.
    dex_method_index_ = new_idx;
  }

  // Lookup the Class from the type index into this method's dex cache.
  ObjPtr<mirror::Class> LookupResolvedClassFromTypeIndex(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve the Class from the type index into this method's dex cache.
  ObjPtr<mirror::Class> ResolveClassFromTypeIndex(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if this method has the same name and signature as the other method.
  bool HasSameNameAndSignature(ArtMethod* other) REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method index for this method within other_dexfile. If this method isn't present then
  // return dex::kDexNoIndex. The name_and_signature_idx MUST refer to a MethodId with the same
  // name and signature in the other_dexfile, such as the method index used to resolve this method
  // in the other_dexfile.
  uint32_t FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
                                            uint32_t name_and_signature_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char ReturnType, char... ArgType>
  typename detail::ShortyTraits<ReturnType>::Type
  InvokeStatic(Thread* self, typename detail::ShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char ReturnType, char... ArgType>
  typename detail::ShortyTraits<ReturnType>::Type
  InvokeInstance(Thread* self,
                 ObjPtr<mirror::Object> receiver,
                 typename detail::ShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char ReturnType, char... ArgType>
  typename detail::ShortyTraits<ReturnType>::Type
  InvokeFinal(Thread* self,
              ObjPtr<mirror::Object> receiver,
              typename detail::ShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char ReturnType, char... ArgType>
  typename detail::ShortyTraits<ReturnType>::Type
  InvokeVirtual(Thread* self,
                ObjPtr<mirror::Object> receiver,
                typename detail::ShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char ReturnType, char... ArgType>
  typename detail::ShortyTraits<ReturnType>::Type
  InvokeInterface(Thread* self,
                  ObjPtr<mirror::Object> receiver,
                  typename detail::ShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char... ArgType, typename HandleScopeType>
  Handle<mirror::Object> NewObject(HandleScopeType& hs,
                                   Thread* self,
                                   typename detail::HandleShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <char... ArgType>
  ObjPtr<mirror::Object> NewObject(Thread* self,
                                   typename detail::HandleShortyTraits<ArgType>::Type... args)
      REQUIRES_SHARED(Locks::mutator_lock_);
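
  // A hypothetical call sketch for the typed invoke helpers above (the names
  // and values are illustrative only): given an ArtMethod* `method` that
  // resolves a static method with shorty "III", e.g. int max(int, int), and a
  // runnable current thread `self`:
  //
  //   int32_t result = method->InvokeStatic<'I', 'I', 'I'>(self, 1, 2);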

  // Returns true if the method needs a class initialization check according to access flags.
  // Only static methods other than the class initializer need this check.
  // The caller is responsible for performing the actual check.
  bool NeedsClinitCheckBeforeCall() const {
    return NeedsClinitCheckBeforeCall(GetAccessFlags());
  }

  static bool NeedsClinitCheckBeforeCall(uint32_t access_flags) {
    // The class initializer is special as it is invoked during initialization
    // and does not need the check.
    return IsStatic(access_flags) && !IsConstructor(access_flags);
  }
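
  // For example, decoding the check above: a static method such as Foo.bar()
  // is static and not a constructor, so it needs the check until Foo is
  // visibly initialized, while Foo.<clinit> is both static and marked as a
  // constructor (see IsClassInitializer()), so it does not.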

  // Check if the method needs a class initialization check before call
  // and its declaring class is not yet visibly initialized.
  // (The class needs to be visibly initialized before we can use entrypoints
  // to compiled code for static methods. See b/18161648.)
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool StillNeedsClinitCheck() REQUIRES_SHARED(Locks::mutator_lock_);

  // Similar to `StillNeedsClinitCheck()` but the method's declaring class may
  // be dead but not yet reclaimed by the GC, so we cannot do a full read barrier
  // but we still want to check the class status in the to-space class if any.
  // Note: JIT can hold and use such methods during managed heap GC.
  bool StillNeedsClinitCheckMayBeDead() REQUIRES_SHARED(Locks::mutator_lock_);

  // Check if the declaring class has been verified and look at the to-space
  // class object, if any, as in `StillNeedsClinitCheckMayBeDead()`.
  bool IsDeclaringClassVerifiedMayBeDead() REQUIRES_SHARED(Locks::mutator_lock_);

  const void* GetEntryPointFromQuickCompiledCode() const {
    return GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
  }
  ALWAYS_INLINE
  const void* GetEntryPointFromQuickCompiledCodePtrSize(PointerSize pointer_size) const {
    return GetNativePointer<const void*>(
        EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
  }

  void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
                                              kRuntimePointerSize);
  }
  ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
      const void* entry_point_from_quick_compiled_code, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetNativePointer(EntryPointFromQuickCompiledCodeOffset(pointer_size),
                     entry_point_from_quick_compiled_code,
                     pointer_size);
  }

  static constexpr MemberOffset DataOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, data_) / sizeof(void*) * static_cast<size_t>(pointer_size));
  }

  static constexpr MemberOffset EntryPointFromJniOffset(PointerSize pointer_size) {
    return DataOffset(pointer_size);
  }

  static constexpr MemberOffset EntryPointFromQuickCompiledCodeOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*)
            * static_cast<size_t>(pointer_size));
  }
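
  // A worked example of the offset computations above: `data_` is the first
  // member of `PtrSizedFields` and `entry_point_from_quick_compiled_code_` is
  // the second, so dividing the host member offset by sizeof(void*) scales it
  // to 0 and 1 target pointers respectively. Hence, assuming a 64-bit target,
  // DataOffset(k64) == PtrSizedFieldsOffset(k64) and
  // EntryPointFromQuickCompiledCodeOffset(k64) == PtrSizedFieldsOffset(k64) + 8.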

  ImtConflictTable* GetImtConflictTable(PointerSize pointer_size) const {
    DCHECK(IsRuntimeMethod());
    return reinterpret_cast<ImtConflictTable*>(GetDataPtrSize(pointer_size));
  }

  ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsRuntimeMethod());
    SetDataPtrSize(table, pointer_size);
  }

  ALWAYS_INLINE bool HasSingleImplementation() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetHasSingleImplementation(bool single_impl)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsIntrinsic()) << "conflict with intrinsic bits";
    if (single_impl) {
      AddAccessFlags(kAccSingleImplementation);
    } else {
      ClearAccessFlags(kAccSingleImplementation);
    }
  }

  ALWAYS_INLINE bool HasSingleImplementationFlag() const {
    return (GetAccessFlags() & kAccSingleImplementation) != 0;
  }

  static uint32_t SetHasSingleImplementation(uint32_t access_flags, bool single_impl) {
    DCHECK(!IsIntrinsic(access_flags)) << "conflict with intrinsic bits";
    if (single_impl) {
      return access_flags | kAccSingleImplementation;
    } else {
      return access_flags & ~kAccSingleImplementation;
    }
  }

  // Takes a method and returns a 'canonical' one if the method is default (and therefore
  // potentially copied from some other class). For example, this ensures that the debugger does not
  // get confused as to which method we are in.
  ArtMethod* GetCanonicalMethod(PointerSize pointer_size = kRuntimePointerSize)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ArtMethod* GetSingleImplementation(PointerSize pointer_size);

  ALWAYS_INLINE void SetSingleImplementation(ArtMethod* method, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNative());
    // Non-abstract method's single implementation is just itself.
    DCHECK(IsAbstract());
    DCHECK(method == nullptr || method->IsInvokable());
    SetDataPtrSize(method, pointer_size);
  }

  void* GetEntryPointFromJni() const {
    DCHECK(IsNative());
    return GetEntryPointFromJniPtrSize(kRuntimePointerSize);
  }

  ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(PointerSize pointer_size) const {
    return GetDataPtrSize(pointer_size);
  }

  void SetEntryPointFromJni(const void* entrypoint)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // The resolution method also has a JNI entrypoint for direct calls from
    // compiled code to the JNI dlsym lookup stub for @CriticalNative.
    DCHECK(IsNative() || IsRuntimeMethod());
    SetEntryPointFromJniPtrSize(entrypoint, kRuntimePointerSize);
  }

  ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetDataPtrSize(entrypoint, pointer_size);
  }

  ALWAYS_INLINE void* GetDataPtrSize(PointerSize pointer_size) const {
    DCHECK(IsImagePointerSize(pointer_size));
    return GetNativePointer<void*>(DataOffset(pointer_size), pointer_size);
  }

  ALWAYS_INLINE void SetDataPtrSize(const void* data, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsImagePointerSize(pointer_size));
    SetNativePointer(DataOffset(pointer_size), data, pointer_size);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod, and therefore one that does not adhere to
  // normal conventions for a method of managed code? Returns false for Proxy methods.
  ALWAYS_INLINE bool IsRuntimeMethod() const {
    return dex_method_index_ == kRuntimeMethodDexMethodIndex;
  }

  bool HasCodeItem() REQUIRES_SHARED(Locks::mutator_lock_) {
    uint32_t access_flags = GetAccessFlags();
    return !IsNative(access_flags) &&
           !IsAbstract(access_flags) &&
           !IsDefaultConflicting(access_flags) &&
           !IsRuntimeMethod() &&
           !IsProxyMethod();
  }

  // We need to explicitly indicate whether the code item is obtained from the compact dex file,
  // because in JVMTI, we obtain the code item from the standard dex file to update the method.
  void SetCodeItem(const dex::CodeItem* code_item, bool is_compact_dex_code_item)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Is this a hand crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsResolutionMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsImtUnimplementedMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // indicates whether the found catch block is responsible for clearing the exception or whether
  // a move-exception instruction is present.
  uint32_t FindCatchBlock(Handle<mirror::Class> exception_type, uint32_t dex_pc,
                          bool* has_no_move_exception)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
           bool kVisitProxyMethod = true,
           typename RootVisitorType>
  void VisitRoots(RootVisitorType& visitor, PointerSize pointer_size) NO_THREAD_SAFETY_ANALYSIS;

  const DexFile* GetDexFile() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);
  std::string_view GetDeclaringClassDescriptorView() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetShorty() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetShorty(uint32_t* out_length) REQUIRES_SHARED(Locks::mutator_lock_);

  std::string_view GetShortyView() REQUIRES_SHARED(Locks::mutator_lock_);

  const Signature GetSignature() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetName() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE std::string_view GetNameView() REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<mirror::String> ResolveNameString() REQUIRES_SHARED(Locks::mutator_lock_);

  bool NameEquals(ObjPtr<mirror::String> name) REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::CodeItem* GetCodeItem() REQUIRES_SHARED(Locks::mutator_lock_);

  int32_t GetLineNumFromDexPC(uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::ProtoId& GetPrototype() REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::TypeList* GetParameterTypeList() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassSourceFile() REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::ClassDef& GetClassDef() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE size_t GetNumberOfParameters() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetReturnTypeDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);
  std::string_view GetReturnTypeDescriptorView() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE Primitive::Type GetReturnTypePrimitive() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetTypeDescriptorFromTypeIdx(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Lookup return type.
  ObjPtr<mirror::Class> LookupResolvedReturnType() REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve return type. May cause thread suspension due to GetClassFromTypeIdx
  // calling ResolveType; this has caused a large number of bugs at call sites.
  ObjPtr<mirror::Class> ResolveReturnType() REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<mirror::ClassLoader> GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ObjPtr<mirror::DexCache> GetDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
  template <ReadBarrierOption kReadBarrierOption>
  ObjPtr<mirror::DexCache> GetObsoleteDexCache() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetInterfaceMethodForProxyUnchecked(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ArtMethod* GetNonObsoleteMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // May cause thread suspension due to class resolution.
  bool EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Size of an instance of this native class.
  static constexpr size_t Size(PointerSize pointer_size) {
    return PtrSizedFieldsOffset(pointer_size) +
        (sizeof(PtrSizedFields) / sizeof(void*)) * static_cast<size_t>(pointer_size);
  }

  // Alignment of an instance of this native class.
  static constexpr size_t Alignment(PointerSize pointer_size) {
    // The ArtMethod alignment is the same as image pointer size. This differs from
    // alignof(ArtMethod) if cross-compiling with pointer_size != sizeof(void*).
    return static_cast<size_t>(pointer_size);
  }
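
  // A worked example of Size(): `PtrSizedFields` holds two pointers, so
  // sizeof(PtrSizedFields) / sizeof(void*) == 2 on the host, and
  // Size(PointerSize::k32) == PtrSizedFieldsOffset(k32) + 2 * 4 while
  // Size(PointerSize::k64) == PtrSizedFieldsOffset(k64) + 2 * 8. This is how
  // a 64-bit host can compute the correct instance size for a 32-bit image.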

  void CopyFrom(ArtMethod* src, PointerSize image_pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void ResetCounter(uint16_t new_value);
  ALWAYS_INLINE void UpdateCounter(int32_t new_samples);
  ALWAYS_INLINE void SetHotCounter();
  ALWAYS_INLINE bool CounterIsHot();
  ALWAYS_INLINE uint16_t GetCounter();
  ALWAYS_INLINE bool CounterHasChanged(uint16_t threshold);

  ALWAYS_INLINE static constexpr uint16_t MaxCounter() {
    return std::numeric_limits<decltype(hotness_count_)>::max();
  }
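  // With `hotness_count_` currently declared as uint16_t, MaxCounter()
  // evaluates to 0xFFFF (65535).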

  ALWAYS_INLINE uint32_t GetImtIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  void CalculateAndSetImtIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr MemberOffset HotnessCountOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, hotness_count_));
  }

  // Returns the method header for the compiled code containing 'pc'. Note that runtime
  // methods will return null for this method, as they are not oat based.
  const OatQuickMethodHeader* GetOatQuickMethodHeader(uintptr_t pc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Get compiled code for the method, return null if no code exists.
  const void* GetOatMethodQuickCode(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns a human-readable signature for 'm'. Something like "a.b.C.m" or
  // "a.b.C.m(II)V" (depending on the value of 'with_signature').
  static std::string PrettyMethod(ArtMethod* m, bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  std::string PrettyMethod(bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the JNI native function name for the non-overloaded method 'm'.
  std::string JniShortName()
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the JNI native function name for the overloaded method 'm'.
  std::string JniLongName()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Visit the individual members of an ArtMethod.  Used by imgdiag.
  // As imgdiag does not support mixing instruction sets or pointer sizes (e.g., using imgdiag32
  // to inspect 64-bit images, etc.), we can go beneath the accessors directly to the class members.
  template <typename VisitorFunc>
  void VisitMembers(VisitorFunc& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsImagePointerSize(kRuntimePointerSize));
    visitor(this, &declaring_class_, "declaring_class_");
    visitor(this, &access_flags_, "access_flags_");
    visitor(this, &dex_method_index_, "dex_method_index_");
    visitor(this, &method_index_, "method_index_");
    visitor(this, &hotness_count_, "hotness_count_");
    visitor(this, &ptr_sized_fields_.data_, "ptr_sized_fields_.data_");
    visitor(this,
            &ptr_sized_fields_.entry_point_from_quick_compiled_code_,
            "ptr_sized_fields_.entry_point_from_quick_compiled_code_");
  }

  // Returns the dex instructions of the code item for the art method. Returns an empty array for
  // the null code item case.
  ALWAYS_INLINE CodeItemInstructionAccessor DexInstructions()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the dex code item data section of the DexFile for the art method.
  ALWAYS_INLINE CodeItemDataAccessor DexInstructionData()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the dex code item debug info section of the DexFile for the art method.
  ALWAYS_INLINE CodeItemDebugInfoAccessor DexInstructionDebugInfo()
      REQUIRES_SHARED(Locks::mutator_lock_);

  GcRoot<mirror::Class>& DeclaringClassRoot() {
    return declaring_class_;
  }

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  GcRoot<mirror::Class> declaring_class_;

  // Access flags; low 16 bits are defined by spec.
  // Getting and setting this flag needs to be atomic when concurrency is
  // possible, e.g. after this method's class is linked. Such as when setting
  // verifier flags and single-implementation flag.
  std::atomic<std::uint32_t> access_flags_;

  /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */

  // Index into method_ids of the dex file associated with this method.
  uint32_t dex_method_index_;

  /* End of dex file fields. */

  // Entry within a dispatch table for this method. For static/direct methods the index is into
  // the declaringClass.directMethods, for virtual methods the vtable and for interface methods the
  // interface's method array in `IfTable`s of implementing classes.
  uint16_t method_index_;

  union {
    // Non-abstract methods: The hotness we measure for this method. Not atomic,
    // as we allow missing increments: if the method is hot, we will see it eventually.
    uint16_t hotness_count_;
    // Abstract methods: IMT index.
    uint16_t imt_index_;
  };

  // Fake padding field gets inserted here.

  // Must be the last fields in the method.
  struct PtrSizedFields {
    // Depending on the method type, the data is
    //   - native method: pointer to the JNI function registered to this method
    //                    or a function to resolve the JNI function,
    //   - resolution method: pointer to a function to resolve the method and
    //                        the JNI function for @CriticalNative.
    //   - conflict method: ImtConflictTable,
    //   - abstract/interface method: the single-implementation if any,
    //   - proxy method: the original interface method or constructor,
    //   - default conflict method: null
    //   - other methods: during AOT the code item offset, at runtime a pointer
    //                    to the code item.
    void* data_;

    // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
    // the interpreter.
    void* entry_point_from_quick_compiled_code_;
  } ptr_sized_fields_;

 private:
  uint16_t FindObsoleteDexClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr size_t PtrSizedFieldsOffset(PointerSize pointer_size) {
    // Round up to pointer size for padding field. Tested in art_method.cc.
    return RoundUp(offsetof(ArtMethod, hotness_count_) + sizeof(hotness_count_),
                   static_cast<size_t>(pointer_size));
  }

  // Compare given pointer size to the image pointer size.
  static bool IsImagePointerSize(PointerSize pointer_size);

  dex::TypeIndex GetReturnTypeIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  template<typename T>
  ALWAYS_INLINE T GetNativePointer(MemberOffset offset, PointerSize pointer_size) const {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      return reinterpret_cast<T>(*reinterpret_cast<const uint32_t*>(addr));
    } else {
      auto v = *reinterpret_cast<const uint64_t*>(addr);
      return reinterpret_cast<T>(dchecked_integral_cast<uintptr_t>(v));
    }
  }

  template<typename T>
  ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
      *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
    } else {
      *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
    }
  }

  static inline bool IsValidIntrinsicUpdate(uint32_t modifier) {
    return (((modifier & kAccIntrinsic) == kAccIntrinsic) &&
            ((modifier & ~(kAccIntrinsic | kAccIntrinsicBits)) == 0) &&
            ((modifier & kAccIntrinsicBits) != 0));  // b/228049006: ensure intrinsic is not `kNone`
  }

  static inline bool OverlapsIntrinsicBits(uint32_t modifier) {
    return (modifier & kAccIntrinsicBits) != 0;
  }

  // This setter guarantees atomicity.
  void AddAccessFlags(uint32_t flag) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_IMPLIES(IsIntrinsic(), !OverlapsIntrinsicBits(flag) || IsValidIntrinsicUpdate(flag));
    // None of the readers rely on ordering.
    access_flags_.fetch_or(flag, std::memory_order_relaxed);
  }

  // This setter guarantees atomicity.
  void ClearAccessFlags(uint32_t flag) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_IMPLIES(IsIntrinsic(), !OverlapsIntrinsicBits(flag) || IsValidIntrinsicUpdate(flag));
    access_flags_.fetch_and(~flag, std::memory_order_relaxed);
  }
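
  // For example, concurrent AddAccessFlags(kAccSingleImplementation) and
  // ClearAccessFlags(kAccObsoleteMethod) cannot lose each other's update:
  // fetch_or and fetch_and are atomic read-modify-write operations, and
  // relaxed ordering suffices because readers only inspect the flag bits
  // themselves (see the comment above).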

  // Helper method for checking the class status of a possibly dead declaring class.
  // See `StillNeedsClinitCheckMayBeDead()` and `IsDeclaringClassVerifiedMayBeDead()`.
  ObjPtr<mirror::Class> GetDeclaringClassMayBeDead() REQUIRES_SHARED(Locks::mutator_lock_);

  // Used by GetName and GetNameView to share common code.
  const char* GetRuntimeMethodName() REQUIRES_SHARED(Locks::mutator_lock_);

  DISALLOW_COPY_AND_ASSIGN(ArtMethod);  // Need to use CopyFrom to deal with 32 vs 64 bits.
};

class MethodCallback {
 public:
  virtual ~MethodCallback() {}

  virtual void RegisterNativeMethod(ArtMethod* method,
                                    const void* original_implementation,
                                    /*out*/void** new_implementation)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};

}  // namespace art

#endif  // ART_RUNTIME_ART_METHOD_H_