/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_
#define ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_

#include "bump_pointer_space.h"

#include "base/bit_utils.h"
#include "mirror/object-inl.h"

namespace art HIDDEN {
namespace gc {
namespace space {

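// Thread-safe allocation: rounds the request up to kAlignment and delegates to
// AllocNonvirtual, which bumps the end pointer atomically and updates the counters.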
inline mirror::Object* BumpPointerSpace::Alloc(Thread*, size_t num_bytes, size_t* bytes_allocated,
                                               size_t* usable_size,
                                               size_t* bytes_tl_bulk_allocated) {
  num_bytes = RoundUp(num_bytes, kAlignment);
  mirror::Object* ret = AllocNonvirtual(num_bytes);
  if (LIKELY(ret != nullptr)) {
    *bytes_allocated = num_bytes;
    if (usable_size != nullptr) {
      *usable_size = num_bytes;
    }
    *bytes_tl_bulk_allocated = num_bytes;
  }
  return ret;
}

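// Variant that requires the mutator lock to be exclusively held: with no concurrent
// allocators, the bump pointer and counters can be updated with plain relaxed
// loads/stores instead of CAS operations.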
inline mirror::Object* BumpPointerSpace::AllocThreadUnsafe(Thread* self, size_t num_bytes,
                                                           size_t* bytes_allocated,
                                                           size_t* usable_size,
                                                           size_t* bytes_tl_bulk_allocated) {
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  num_bytes = RoundUp(num_bytes, kAlignment);
  uint8_t* end = end_.load(std::memory_order_relaxed);
  if (end + num_bytes > growth_end_) {
    return nullptr;
  }
  mirror::Object* obj = reinterpret_cast<mirror::Object*>(end);
  end_.store(end + num_bytes, std::memory_order_relaxed);
  *bytes_allocated = num_bytes;
  // Use the CAS-free versions as an optimization.
  objects_allocated_.store(objects_allocated_.load(std::memory_order_relaxed) + 1,
                           std::memory_order_relaxed);
  bytes_allocated_.store(bytes_allocated_.load(std::memory_order_relaxed) + num_bytes,
                         std::memory_order_relaxed);
  if (UNLIKELY(usable_size != nullptr)) {
    *usable_size = num_bytes;
  }
  *bytes_tl_bulk_allocated = num_bytes;
  return obj;
}

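// Lock-free bump allocation: advances end_ with a weak CAS loop and returns the previous
// end as the new object's address. Does not touch the allocation counters.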
inline mirror::Object* BumpPointerSpace::AllocNonvirtualWithoutAccounting(size_t num_bytes) {
  DCHECK_ALIGNED(num_bytes, kAlignment);
  uint8_t* old_end;
  uint8_t* new_end;
  do {
    old_end = end_.load(std::memory_order_relaxed);
    new_end = old_end + num_bytes;
    // If there is no more room in the region, we are out of memory.
    if (UNLIKELY(new_end > growth_end_)) {
      return nullptr;
    }
  } while (!end_.CompareAndSetWeakSequentiallyConsistent(old_end, new_end));
  return reinterpret_cast<mirror::Object*>(old_end);
}

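// Bump allocation plus accounting: on success, increments objects_allocated_ and
// bytes_allocated_ with relaxed atomic adds.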
inline mirror::Object* BumpPointerSpace::AllocNonvirtual(size_t num_bytes) {
  mirror::Object* ret = AllocNonvirtualWithoutAccounting(num_bytes);
  if (ret != nullptr) {
    objects_allocated_.fetch_add(1, std::memory_order_relaxed);
    bytes_allocated_.fetch_add(num_bytes, std::memory_order_relaxed);
  }
  return ret;
}

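// Returns the address just past |obj|, rounded up to kAlignment, i.e. where the next
// object starts when walking the contiguously allocated objects of this space.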
inline mirror::Object* BumpPointerSpace::GetNextObject(mirror::Object* obj) {
  const uintptr_t position = reinterpret_cast<uintptr_t>(obj) + obj->SizeOf();
  return reinterpret_cast<mirror::Object*>(RoundUp(position, kAlignment));
}

}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_