/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_HANDLE_SCOPE_H_
#define ART_RUNTIME_HANDLE_SCOPE_H_

#include <stack>

#include <android-base/logging.h>

#include "base/locks.h"
#include "base/macros.h"
#include "base/pointer_size.h"
#include "stack_reference.h"

namespace art HIDDEN {

template<class T> class Handle;
class HandleScope;
template<class T> class HandleWrapper;
template<class T> class HandleWrapperObjPtr;
template<class T> class MutableHandle;
template<class MirrorType> class ObjPtr;
class Thread;
class VariableSizedHandleScope;

namespace mirror {
class Object;
}  // namespace mirror
// Basic handle scope, tracked by a list. May be variable sized.
class PACKED(4) BaseHandleScope {
 public:
  bool IsVariableSized() const {
    return capacity_ == kNumReferencesVariableSized;
  }

  // The current size of this handle scope.
  ALWAYS_INLINE uint32_t Size() const;

  // The current capacity of this handle scope.
  // It can change (increase) only for a `VariableSizedHandleScope`.
  ALWAYS_INLINE uint32_t Capacity() const;

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  template <typename Visitor>
  ALWAYS_INLINE void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

  template <typename Visitor>
  ALWAYS_INLINE void VisitHandles(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

  // Link to previous BaseHandleScope or null.
  BaseHandleScope* GetLink() const {
    return link_;
  }

  ALWAYS_INLINE VariableSizedHandleScope* AsVariableSized();
  ALWAYS_INLINE HandleScope* AsHandleScope();
  ALWAYS_INLINE const VariableSizedHandleScope* AsVariableSized() const;
  ALWAYS_INLINE const HandleScope* AsHandleScope() const;

 protected:
  BaseHandleScope(BaseHandleScope* link, uint32_t capacity)
      : link_(link),
        capacity_(capacity) {}

  // Variable sized constructor.
  explicit BaseHandleScope(BaseHandleScope* link)
      : link_(link),
        capacity_(kNumReferencesVariableSized) {}

  static constexpr int32_t kNumReferencesVariableSized = -1;

  // Linked list of handle scopes. The root is held by a Thread.
  BaseHandleScope* const link_;

  // Number of handlerized references. -1 for variable sized handle scopes.
  const int32_t capacity_;

 private:
  DISALLOW_COPY_AND_ASSIGN(BaseHandleScope);
};

// HandleScopes are scoped objects containing a number of Handles. They are used to allocate
// handles so that these handles (and the objects they reference) are visible to the GC as
// roots. It is most common to stack allocate HandleScopes using StackHandleScope.
class PACKED(4) HandleScope : public BaseHandleScope {
 public:
  ~HandleScope() {}

  ALWAYS_INLINE ObjPtr<mirror::Object> GetReference(size_t i) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  ALWAYS_INLINE Handle<T> GetHandle(size_t i) REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  ALWAYS_INLINE MutableHandle<T> GetMutableHandle(size_t i) REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetReference(size_t i, ObjPtr<mirror::Object> object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  ALWAYS_INLINE MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  ALWAYS_INLINE HandleWrapper<T> NewHandleWrapper(T** object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  ALWAYS_INLINE HandleWrapperObjPtr<T> NewHandleWrapper(ObjPtr<T>* object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class MirrorType>
  ALWAYS_INLINE MutableHandle<MirrorType> NewHandle(ObjPtr<MirrorType> object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;
  // Offset of link within HandleScope, used by generated code.
  static constexpr size_t LinkOffset([[maybe_unused]] PointerSize pointer_size) { return 0; }

  // Offset of capacity within handle scope, used by generated code.
  static constexpr size_t CapacityOffset(PointerSize pointer_size) {
    return static_cast<size_t>(pointer_size);
  }

  // Offset of references within handle scope, used by generated code.
  static constexpr size_t ReferencesOffset(PointerSize pointer_size) {
    return CapacityOffset(pointer_size) + sizeof(capacity_) + sizeof(size_);
  }
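
  // To illustrate the offsets above, a sketch of the resulting layout on a 64-bit
  // target (assuming 8-byte pointers and 4-byte StackReference entries; PACKED(4)
  // means there is no padding between the fields):
  //   offset  0: link_       (8 bytes)  -> LinkOffset(PointerSize::k64) == 0
  //   offset  8: capacity_   (4 bytes)  -> CapacityOffset(PointerSize::k64) == 8
  //   offset 12: size_       (4 bytes)
  //   offset 16: references  (4 bytes each)
  //                                     -> ReferencesOffset(PointerSize::k64) == 16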

  // The current size of this handle scope.
  ALWAYS_INLINE uint32_t Size() const {
    return size_;
  }

  // The capacity of this handle scope, immutable.
  ALWAYS_INLINE uint32_t Capacity() const {
    DCHECK_GT(capacity_, 0);
    return static_cast<uint32_t>(capacity_);
  }

  template <typename Visitor>
  ALWAYS_INLINE void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

  template <typename Visitor>
  ALWAYS_INLINE void VisitHandles(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

 protected:
  // Return backing storage used for references.
  ALWAYS_INLINE StackReference<mirror::Object>* GetReferences() const {
    uintptr_t address = reinterpret_cast<uintptr_t>(this) + ReferencesOffset(kRuntimePointerSize);
    return reinterpret_cast<StackReference<mirror::Object>*>(address);
  }

  explicit HandleScope(size_t capacity) : HandleScope(nullptr, capacity) {}

  HandleScope(BaseHandleScope* link, uint32_t capacity)
      : BaseHandleScope(link, capacity) {
    // Handle scope should be created only if we have a code path that stores something in it.
    // We may not take that code path and the handle scope may remain empty.
    DCHECK_NE(capacity, 0u);
  }

  // Position at which new handles will be created.
  uint32_t size_ = 0;

  // Storage for references is in derived classes.
  // StackReference<mirror::Object> references_[capacity_]

 private:
  DISALLOW_COPY_AND_ASSIGN(HandleScope);
};

// Fixed size handle scope that is not necessarily linked into the thread.
template<size_t kNumReferences>
class PACKED(4) FixedSizeHandleScope : public HandleScope {
 private:
  explicit ALWAYS_INLINE FixedSizeHandleScope(BaseHandleScope* link)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE ~FixedSizeHandleScope() REQUIRES_SHARED(Locks::mutator_lock_) {}

  // Reference storage.
  StackReference<mirror::Object> storage_[kNumReferences];

  template<size_t kNumRefs> friend class StackHandleScope;
  friend class VariableSizedHandleScope;
};

// Scoped handle storage of a fixed size that is stack allocated.
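//
// A typical usage sketch (`self` is the current Thread*; the object pointers and
// their sources are illustrative assumptions, not part of this header):
//
//   StackHandleScope<2> hs(self);
//   Handle<mirror::Class> klass = hs.NewHandle(klass_ptr);
//   Handle<mirror::Object> obj = hs.NewHandle(obj_ptr);
//   // `klass` and `obj` remain valid GC roots across suspend points; the scope
//   // unlinks itself from the thread's list when it goes out of scope.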
template<size_t kNumReferences>
class PACKED(4) StackHandleScope final : public FixedSizeHandleScope<kNumReferences> {
 public:
  explicit ALWAYS_INLINE StackHandleScope(Thread* self)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ~StackHandleScope() REQUIRES_SHARED(Locks::mutator_lock_);

  Thread* Self() const {
    return self_;
  }

 private:
  // The thread whose handle scope list this scope is linked into. The stack handle
  // scope pushes and pops itself on this thread's list.
  Thread* const self_;
};

// Utility class to manage a variable sized handle scope by having a list of fixed size handle
// scopes.
// Calls to NewHandle will create a new handle inside the current FixedSizeHandleScope.
// When the current handle scope becomes full, a new one is created and put at the front of the
// list.
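//
// An illustrative sketch for when the number of handles is not known statically
// (`handles` and `objects` are hypothetical names, not part of this header):
//
//   VariableSizedHandleScope vhs(self);
//   for (size_t i = 0; i != objects.size(); ++i) {
//     handles.push_back(vhs.NewHandle(objects[i]));  // Grows a new local scope when full.
//   }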
class VariableSizedHandleScope : public BaseHandleScope {
 public:
  explicit VariableSizedHandleScope(Thread* const self) REQUIRES_SHARED(Locks::mutator_lock_);
  ~VariableSizedHandleScope() REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_);

  template<class MirrorType>
  MutableHandle<MirrorType> NewHandle(ObjPtr<MirrorType> ptr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // The current size of this handle scope.
  ALWAYS_INLINE uint32_t Size() const;

  // The current capacity of this handle scope.
  ALWAYS_INLINE uint32_t Capacity() const;

  // Retrieve a `Handle<>` based on the slot index (in handle creation order).
  // Note: This is linear in the size of the scope, so it should be used carefully.
  template<class T>
  ALWAYS_INLINE Handle<T> GetHandle(size_t i) REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  template <typename Visitor>
  void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

  template <typename Visitor>
  ALWAYS_INLINE void VisitHandles(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  static constexpr size_t kLocalScopeSize = 64u;
  static constexpr size_t kSizeOfReferencesPerScope =
      kLocalScopeSize
          - /* BaseHandleScope::link_ */ sizeof(BaseHandleScope*)
          - /* BaseHandleScope::capacity_ */ sizeof(int32_t)
          - /* HandleScope<>::size_ */ sizeof(uint32_t);
  static constexpr size_t kNumReferencesPerScope =
      kSizeOfReferencesPerScope / sizeof(StackReference<mirror::Object>);
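  // For example, on a 64-bit target (assuming 8-byte pointers and 4-byte
  // StackReference entries) this works out to (64 - 8 - 4 - 4) / 4 = 12
  // references per local scope.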

  Thread* const self_;

  // Linked list of fixed size handle scopes.
  using LocalScopeType = FixedSizeHandleScope<kNumReferencesPerScope>;
  static_assert(sizeof(LocalScopeType) == kLocalScopeSize, "Unexpected size of LocalScopeType");
  LocalScopeType* current_scope_;
  LocalScopeType first_scope_;

  DISALLOW_COPY_AND_ASSIGN(VariableSizedHandleScope);
};

}  // namespace art

#endif  // ART_RUNTIME_HANDLE_SCOPE_H_