/*
 * Copyright 2021 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_INL_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_INL_H_

#include "gc/space/bump_pointer_space.h"
#include "mark_compact.h"
#include "mirror/object-inl.h"

namespace art HIDDEN {
namespace gc {
namespace collector {

inline void MarkCompact::UpdateClassAfterObjectMap(mirror::Object* obj) {
  mirror::Class* klass = obj->GetClass<kVerifyNone, kWithoutReadBarrier>();
  // Track a class if it needs walking super-classes for visiting references or
  // if it's higher in address order than its objects and is in moving space.
  if (UNLIKELY(
          (std::less<mirror::Object*>{}(obj, klass) && HasAddress(klass)) ||
          (klass->GetReferenceInstanceOffsets<kVerifyNone>() == mirror::Class::kClassWalkSuper &&
           walk_super_class_cache_ != klass))) {
    // Since this function gets invoked in the compaction pause as well, it is
    // preferable to store such a super class separately rather than updating the
    // key, as the latter would require traversing the hierarchy for every object of 'klass'.
    auto ret1 = class_after_obj_hash_map_.try_emplace(ObjReference::FromMirrorPtr(klass),
                                                      ObjReference::FromMirrorPtr(obj));
    if (ret1.second) {
      if (klass->GetReferenceInstanceOffsets<kVerifyNone>() == mirror::Class::kClassWalkSuper) {
        // In this case we need to traverse the super-class hierarchy to find
        // the super class that is highest in address order.
        mirror::Class* highest_klass = HasAddress(klass) ? klass : nullptr;
        for (ObjPtr<mirror::Class> k = klass->GetSuperClass<kVerifyNone, kWithoutReadBarrier>();
             k != nullptr;
             k = k->GetSuperClass<kVerifyNone, kWithoutReadBarrier>()) {
          // TODO: Can we break once we encounter a super class outside the moving space?
          if (HasAddress(k.Ptr())) {
            highest_klass = std::max(highest_klass, k.Ptr(), std::less<mirror::Class*>());
          }
        }
        if (highest_klass != nullptr && highest_klass != klass) {
          auto ret2 = super_class_after_class_hash_map_.try_emplace(
              ObjReference::FromMirrorPtr(klass), ObjReference::FromMirrorPtr(highest_klass));
          DCHECK(ret2.second);
        } else {
          walk_super_class_cache_ = klass;
        }
      }
    } else if (std::less<mirror::Object*>{}(obj, ret1.first->second.AsMirrorPtr())) {
      ret1.first->second = ObjReference::FromMirrorPtr(obj);
    }
  }
}

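// Sets the live-words bitmap bits for the object occupying
// [begin, begin + size) and returns the bit index corresponding to 'begin'.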
template <size_t kAlignment>
inline uintptr_t MarkCompact::LiveWordsBitmap<kAlignment>::SetLiveWords(uintptr_t begin,
                                                                        size_t size) {
  const uintptr_t begin_bit_idx = MemRangeBitmap::BitIndexFromAddr(begin);
  DCHECK(!Bitmap::TestBit(begin_bit_idx));
  // Range of bits to set: [begin, end]
  uintptr_t end = begin + size - kAlignment;
  const uintptr_t end_bit_idx = MemRangeBitmap::BitIndexFromAddr(end);
  uintptr_t* begin_bm_address = Bitmap::Begin() + Bitmap::BitIndexToWordIndex(begin_bit_idx);
  uintptr_t* end_bm_address = Bitmap::Begin() + Bitmap::BitIndexToWordIndex(end_bit_idx);
  ptrdiff_t diff = end_bm_address - begin_bm_address;
  uintptr_t mask = Bitmap::BitIndexToMask(begin_bit_idx);
  // Bits that need to be set in the first word, if it's not also the last word.
  mask = ~(mask - 1);
  if (diff > 0) {
    *begin_bm_address |= mask;
    mask = ~0;
    // Even though memset can handle the (diff == 1) case, we should avoid the
    // overhead of a function call for this highly likely case (as most objects
    // are small).
    if (diff > 1) {
      // Set all intermediate bits to 1.
      std::memset(static_cast<void*>(begin_bm_address + 1), 0xff, (diff - 1) * sizeof(uintptr_t));
    }
  }
  uintptr_t end_mask = Bitmap::BitIndexToMask(end_bit_idx);
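  // (end_mask | (end_mask - 1)) has all bits up to and including the end bit
  // set; intersecting it with 'mask' handles the case where the first and
  // last words are the same.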
  *end_bm_address |= mask & (end_mask | (end_mask - 1));
  return begin_bit_idx;
}

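// Visits the contiguous runs ("strides") of set bits, i.e. live words, in
// [begin_bit_idx, bit index of (end - kAlignment)], invoking
// visitor(stride_begin_bit_idx, stride_size, is_last) for each stride, and
// visiting at most 'bytes / kAlignment' live words in total.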
template <size_t kAlignment> template <typename Visitor>
inline void MarkCompact::LiveWordsBitmap<kAlignment>::VisitLiveStrides(uintptr_t begin_bit_idx,
                                                                       uint8_t* end,
                                                                       const size_t bytes,
                                                                       Visitor&& visitor) const {
  // Range to visit [begin_bit_idx, end_bit_idx]
  DCHECK(IsAligned<kAlignment>(end));
  end -= kAlignment;
  const uintptr_t end_bit_idx = MemRangeBitmap::BitIndexFromAddr(reinterpret_cast<uintptr_t>(end));
  DCHECK_LE(begin_bit_idx, end_bit_idx);
  uintptr_t begin_word_idx = Bitmap::BitIndexToWordIndex(begin_bit_idx);
  const uintptr_t end_word_idx = Bitmap::BitIndexToWordIndex(end_bit_idx);
  DCHECK(Bitmap::TestBit(begin_bit_idx));
  size_t stride_size = 0;
  size_t idx_in_word = 0;
  size_t num_heap_words = bytes / kAlignment;
  uintptr_t live_stride_start_idx;
  uintptr_t word = Bitmap::Begin()[begin_word_idx];

  // Setup the first word.
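  // Clear the bits below begin_bit_idx so that lower (already consumed) live
  // words in this bitmap word are not visited again.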
  word &= ~(Bitmap::BitIndexToMask(begin_bit_idx) - 1);
  begin_bit_idx = RoundDown(begin_bit_idx, Bitmap::kBitsPerBitmapWord);

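  // Walk the bitmap word by word, coalescing consecutive 1-bits into strides
  // and invoking the visitor once per stride.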
  do {
    if (UNLIKELY(begin_word_idx == end_word_idx)) {
      uintptr_t mask = Bitmap::BitIndexToMask(end_bit_idx);
      word &= mask | (mask - 1);
    }
    if (~word == 0) {
      // All bits in the word are marked.
      if (stride_size == 0) {
        live_stride_start_idx = begin_bit_idx;
      }
      stride_size += Bitmap::kBitsPerBitmapWord;
      if (num_heap_words <= stride_size) {
        break;
      }
    } else {
      while (word != 0) {
        // discard 0s
        size_t shift = CTZ(word);
        idx_in_word += shift;
        word >>= shift;
        if (stride_size > 0) {
          if (shift > 0) {
            if (num_heap_words <= stride_size) {
              break;
            }
            visitor(live_stride_start_idx, stride_size, /*is_last*/ false);
            num_heap_words -= stride_size;
            live_stride_start_idx = begin_bit_idx + idx_in_word;
            stride_size = 0;
          }
        } else {
          live_stride_start_idx = begin_bit_idx + idx_in_word;
        }
        // consume 1s
        shift = CTZ(~word);
        DCHECK_NE(shift, 0u);
        word >>= shift;
        idx_in_word += shift;
        stride_size += shift;
      }
      // If the whole word was 0, or its higher bits were 0s, then we exited
      // the above loop without completely consuming the word, so call the
      // visitor, if needed.
      if (idx_in_word < Bitmap::kBitsPerBitmapWord && stride_size > 0) {
        if (num_heap_words <= stride_size) {
          break;
        }
        visitor(live_stride_start_idx, stride_size, /*is_last*/ false);
        num_heap_words -= stride_size;
        stride_size = 0;
      }
      idx_in_word = 0;
    }
    begin_bit_idx += Bitmap::kBitsPerBitmapWord;
    begin_word_idx++;
    if (UNLIKELY(begin_word_idx > end_word_idx)) {
      num_heap_words = std::min(stride_size, num_heap_words);
      break;
    }
    word = Bitmap::Begin()[begin_word_idx];
  } while (true);

  if (stride_size > 0) {
    visitor(live_stride_start_idx, num_heap_words, /*is_last*/ true);
  }
}

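// Returns the bit offset, within the given chunk, of the nth (0-based) set
// bit, i.e. the nth live heap word in that chunk.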
template <size_t kAlignment>
inline
uint32_t MarkCompact::LiveWordsBitmap<kAlignment>::FindNthLiveWordOffset(size_t chunk_idx,
                                                                         uint32_t n) const {
  DCHECK_LT(n, kBitsPerVectorWord);
  const size_t index = chunk_idx * kBitmapWordsPerVectorWord;
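  // Scan the chunk's bitmap words, skipping over runs of 0s (dead words) and
  // counting 'n' down through runs of 1s until the nth live word is reached.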
  for (uint32_t i = 0; i < kBitmapWordsPerVectorWord; i++) {
    uintptr_t word = Bitmap::Begin()[index + i];
    if (~word == 0) {
      if (n < Bitmap::kBitsPerBitmapWord) {
        return i * Bitmap::kBitsPerBitmapWord + n;
      }
      n -= Bitmap::kBitsPerBitmapWord;
    } else {
      uint32_t j = 0;
      while (word != 0) {
        // count contiguous 0s
        uint32_t shift = CTZ(word);
        word >>= shift;
        j += shift;
        // count contiguous 1s
        shift = CTZ(~word);
        DCHECK_NE(shift, 0u);
        if (shift > n) {
          return i * Bitmap::kBitsPerBitmapWord + j + n;
        }
        n -= shift;
        word >>= shift;
        j += shift;
      }
    }
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

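// Updates the reference at 'obj' + 'offset' to its post-compact address if it
// points into [begin, end). In debug builds, also verifies that the referent
// is marked.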
inline void MarkCompact::UpdateRef(mirror::Object* obj,
                                   MemberOffset offset,
                                   uint8_t* begin,
                                   uint8_t* end) {
  mirror::Object* old_ref = obj->GetFieldObject<
      mirror::Object, kVerifyNone, kWithoutReadBarrier, /*kIsVolatile*/false>(offset);
  if (kIsDebugBuild) {
    if (HasAddress(old_ref) &&
        reinterpret_cast<uint8_t*>(old_ref) < black_allocations_begin_ &&
        !moving_space_bitmap_->Test(old_ref)) {
      mirror::Object* from_ref = GetFromSpaceAddr(old_ref);
      std::ostringstream oss;
      heap_->DumpSpaces(oss);
      MemMap::DumpMaps(oss, /* terse= */ true);
      LOG(FATAL) << "Not marked in the bitmap ref=" << old_ref
                 << " from_ref=" << from_ref
                 << " offset=" << offset
                 << " obj=" << obj
                 << " obj-validity=" << IsValidObject(obj)
                 << " from-space=" << static_cast<void*>(from_space_begin_)
                 << " bitmap= " << moving_space_bitmap_->DumpMemAround(old_ref)
                 << " from_ref "
                 << heap_->GetVerification()->DumpRAMAroundAddress(
                     reinterpret_cast<uintptr_t>(from_ref), 128)
                 << " obj "
                 << heap_->GetVerification()->DumpRAMAroundAddress(
                     reinterpret_cast<uintptr_t>(obj), 128)
                 << " old_ref " << heap_->GetVerification()->DumpRAMAroundAddress(
                     reinterpret_cast<uintptr_t>(old_ref), 128)
                 << " maps\n" << oss.str();
    }
  }
  mirror::Object* new_ref = PostCompactAddress(old_ref, begin, end);
  if (new_ref != old_ref) {
    obj->SetFieldObjectWithoutWriteBarrier<
        /*kTransactionActive*/false, /*kCheckTransaction*/false, kVerifyNone, /*kIsVolatile*/false>(
            offset,
            new_ref);
  }
}

inline bool MarkCompact::VerifyRootSingleUpdate(void* root,
                                                mirror::Object* old_ref,
                                                const RootInfo& info) {
  // ASAN promotes stack frames to the heap in order to detect
  // stack-use-after-return issues. And HWASAN tags pointers, which makes it
  // difficult to recognize stack pointers and exclude them from the check.
  // So skip double-root update detection when either sanitizer is enabled.
  if (kIsDebugBuild && !kMemoryToolIsAvailable && !kHwAsanEnabled) {
    void* stack_low_addr = stack_low_addr_;
    void* stack_high_addr = stack_high_addr_;
    if (!HasAddress(old_ref)) {
      return false;
    }
    Thread* self = Thread::Current();
    if (UNLIKELY(stack_low_addr == nullptr)) {
      stack_low_addr = self->GetStackEnd();
      stack_high_addr = reinterpret_cast<char*>(stack_low_addr) + self->GetStackSize();
    }
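    // Only roots outside the mutator's stack range are tracked for
    // double-update detection.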
    if (std::less<void*>{}(root, stack_low_addr) || std::greater<void*>{}(root, stack_high_addr)) {
      bool inserted;
      {
        MutexLock mu(self, lock_);
        inserted = updated_roots_->insert(root).second;
      }
      if (!inserted) {
        std::ostringstream oss;
        heap_->DumpSpaces(oss);
        MemMap::DumpMaps(oss, /* terse= */ true);
        CHECK(inserted) << "root=" << root << " old_ref=" << old_ref
                        << " stack_low_addr=" << stack_low_addr
                        << " stack_high_addr=" << stack_high_addr << " maps\n"
                        << oss.str();
      }
    }
    DCHECK(reinterpret_cast<uint8_t*>(old_ref) >= black_allocations_begin_ ||
           live_words_bitmap_->Test(old_ref))
        << "ref=" << old_ref << " <" << mirror::Object::PrettyTypeOf(old_ref) << "> RootInfo ["
        << info << "]";
  }
  return true;
}

inline void MarkCompact::UpdateRoot(mirror::CompressedReference<mirror::Object>* root,
                                    uint8_t* begin,
                                    uint8_t* end,
                                    const RootInfo& info) {
  DCHECK(!root->IsNull());
  mirror::Object* old_ref = root->AsMirrorPtr();
  if (VerifyRootSingleUpdate(root, old_ref, info)) {
    mirror::Object* new_ref = PostCompactAddress(old_ref, begin, end);
    if (old_ref != new_ref) {
      root->Assign(new_ref);
    }
  }
}

inline void MarkCompact::UpdateRoot(mirror::Object** root,
                                    uint8_t* begin,
                                    uint8_t* end,
                                    const RootInfo& info) {
  mirror::Object* old_ref = *root;
  if (VerifyRootSingleUpdate(root, old_ref, info)) {
    mirror::Object* new_ref = PostCompactAddress(old_ref, begin, end);
    if (old_ref != new_ref) {
      *root = new_ref;
    }
  }
}

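// Returns the number of set bits (live words) from the beginning of the
// chunk containing 'bit_idx' up to, but excluding, 'bit_idx' itself.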
template <size_t kAlignment>
inline size_t MarkCompact::LiveWordsBitmap<kAlignment>::CountLiveWordsUpto(size_t bit_idx) const {
  const size_t word_offset = Bitmap::BitIndexToWordIndex(bit_idx);
  uintptr_t word;
  size_t ret = 0;
  // This is needed only if we decide to make chunks 128-bit but still
  // choose to use a 64-bit word for the bitmap. Ideally we should use 128-bit
  // SIMD instructions to compute popcount.
  if (kBitmapWordsPerVectorWord > 1) {
    for (size_t i = RoundDown(word_offset, kBitmapWordsPerVectorWord); i < word_offset; i++) {
      word = Bitmap::Begin()[i];
      ret += POPCOUNT(word);
    }
  }
  word = Bitmap::Begin()[word_offset];
  const uintptr_t mask = Bitmap::BitIndexToMask(bit_idx);
  DCHECK_NE(word & mask, 0u)
        << " word_offset:" << word_offset
        << " bit_idx:" << bit_idx
        << " bit_idx_in_word:" << (bit_idx % Bitmap::kBitsPerBitmapWord)
        << std::hex << " word: 0x" << word
        << " mask: 0x" << mask << std::dec;
  ret += POPCOUNT(word & (mask - 1));
  return ret;
}

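// Black allocations (objects allocated after marking began) are compacted as
// a single contiguous block, so their post-compact address is the old address
// slid down by a fixed difference.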
inline mirror::Object* MarkCompact::PostCompactBlackObjAddr(mirror::Object* old_ref) const {
  return reinterpret_cast<mirror::Object*>(reinterpret_cast<uint8_t*>(old_ref)
                                           - black_objs_slide_diff_);
}

inline mirror::Object* MarkCompact::PostCompactOldObjAddr(mirror::Object* old_ref) const {
  const uintptr_t begin = live_words_bitmap_->Begin();
  const uintptr_t addr_offset = reinterpret_cast<uintptr_t>(old_ref) - begin;
  const size_t vec_idx = addr_offset / kOffsetChunkSize;
  const size_t live_bytes_in_bitmap_word =
      live_words_bitmap_->CountLiveWordsUpto(addr_offset / kAlignment) * kAlignment;
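  // Post-compact address = space begin + bytes compacted before this object's
  // chunk (chunk_info_vec_) + live bytes preceding the object within its own
  // chunk.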
  return reinterpret_cast<mirror::Object*>(begin
                                           + chunk_info_vec_[vec_idx]
                                           + live_bytes_in_bitmap_word);
}

inline mirror::Object* MarkCompact::PostCompactAddressUnchecked(mirror::Object* old_ref) const {
  if (reinterpret_cast<uint8_t*>(old_ref) >= black_allocations_begin_) {
    return PostCompactBlackObjAddr(old_ref);
  }
  if (kIsDebugBuild) {
    mirror::Object* from_ref = GetFromSpaceAddr(old_ref);
    DCHECK(live_words_bitmap_->Test(old_ref))
         << "ref=" << old_ref;
    if (!moving_space_bitmap_->Test(old_ref)) {
      std::ostringstream oss;
      Runtime::Current()->GetHeap()->DumpSpaces(oss);
      MemMap::DumpMaps(oss, /* terse= */ true);
      LOG(FATAL) << "ref=" << old_ref
                 << " from_ref=" << from_ref
                 << " from-space=" << static_cast<void*>(from_space_begin_)
                 << " bitmap= " << moving_space_bitmap_->DumpMemAround(old_ref)
                 << heap_->GetVerification()->DumpRAMAroundAddress(
                         reinterpret_cast<uintptr_t>(from_ref), 128)
                 << " maps\n" << oss.str();
    }
  }
  return PostCompactOldObjAddr(old_ref);
}

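// Returns the post-compact address of 'old_ref' if it lies within
// [begin, end); otherwise returns 'old_ref' unchanged.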
inline mirror::Object* MarkCompact::PostCompactAddress(mirror::Object* old_ref,
                                                       uint8_t* begin,
                                                       uint8_t* end) const {
  if (LIKELY(HasAddress(old_ref, begin, end))) {
    return PostCompactAddressUnchecked(old_ref);
  }
  return old_ref;
}

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_INL_H_