/art/libartbase/base/ |
D | memory_region_test.cc |
      26  MemoryRegion region(&data, n);  in TEST() local
      28  ASSERT_EQ(0, region.LoadUnaligned<char>(0));  in TEST()
      33  region.LoadUnaligned<uint32_t>(1));  in TEST()
      34  ASSERT_EQ(5 + (6 << kBitsPerByte), region.LoadUnaligned<int16_t>(5));  in TEST()
      35  ASSERT_EQ(7u, region.LoadUnaligned<unsigned char>(7));  in TEST()
      41  MemoryRegion region(&data, n);  in TEST() local
      43  region.StoreUnaligned<unsigned char>(0u, 7);  in TEST()
      44  region.StoreUnaligned<int16_t>(1, 6 + (5 << kBitsPerByte));  in TEST()
      45  region.StoreUnaligned<uint32_t>(3,  in TEST()
      50  region.StoreUnaligned<char>(7, 0);  in TEST()
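
The test lines above exercise MemoryRegion's unaligned typed accessors. The following is a minimal stand-in sketch of that access pattern, not the ART class itself: the ByteRegion type, the memcpy-based implementation, and the little-endian expectations are assumptions made for illustration.

    // Stand-in sketch only: unaligned typed loads/stores over a raw buffer,
    // mirroring the LoadUnaligned/StoreUnaligned calls in the test hits above.
    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    constexpr int kBitsPerByte = 8;

    struct ByteRegion {  // hypothetical stand-in for art::MemoryRegion
      uint8_t* data;
      size_t size;

      template <typename T>
      T LoadUnaligned(uintptr_t offset) const {
        assert(offset + sizeof(T) <= size);
        T value;
        std::memcpy(&value, data + offset, sizeof(T));  // memcpy keeps unaligned reads well-defined
        return value;
      }

      template <typename T>
      void StoreUnaligned(uintptr_t offset, T value) {
        assert(offset + sizeof(T) <= size);
        std::memcpy(data + offset, &value, sizeof(T));
      }
    };

    int main() {
      uint8_t buffer[8] = {0, 1, 2, 3, 4, 5, 6, 7};
      ByteRegion region{buffer, sizeof(buffer)};
      // On a little-endian target, bytes 5 and 6 read back as 5 + (6 << kBitsPerByte).
      assert(region.LoadUnaligned<int16_t>(5) == 5 + (6 << kBitsPerByte));
      region.StoreUnaligned<int16_t>(1, 6 + (5 << kBitsPerByte));
      assert(buffer[1] == 6 && buffer[2] == 5);
      return 0;
    }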
|
D | data_hash.h |
      86  size_t operator()(BMR region) const {  in operator()
      88  size_t num_full_blocks = region.size_in_bits() / kMurmur3BlockBits;  in operator()
      89  size_t num_end_bits = region.size_in_bits() % kMurmur3BlockBits;  in operator()
      92  uint32_t block = region.LoadBits(i * kMurmur3BlockBits, kMurmur3BlockBits);  in operator()
      96  uint32_t end_bits = region.LoadBits(num_full_blocks * kMurmur3BlockBits, num_end_bits);  in operator()
     101  size_t num_full_bytes = region.size_in_bits() / kBitsPerByte;  in operator()
     102  size_t num_end_bits = region.size_in_bits() % kBitsPerByte;  in operator()
     105  uint8_t byte = region.LoadBits(i * kBitsPerByte, kBitsPerByte);  in operator()
     109  uint32_t end_bits = region.LoadBits(num_full_bytes * kBitsPerByte, num_end_bits);  in operator()
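
The data_hash.h hits show the Murmur3-style block walk: the bit region is consumed as full 32-bit blocks plus a tail of leftover bits. The sketch below only reproduces that block/tail split; the mixing step is a placeholder rather than the actual Murmur3 rounds, and HashBits/kBlockBits are names invented for illustration.

    // Schematic block/tail split as in the operator() hits above; the real
    // DataHash applies Murmur3 mixing to each block, which is elided here.
    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    constexpr size_t kBitsPerByte = 8;
    constexpr size_t kBlockBits = 32;  // plays the role of kMurmur3BlockBits

    uint32_t HashBits(const uint8_t* data, size_t size_in_bits) {
      size_t num_full_blocks = size_in_bits / kBlockBits;
      size_t num_end_bits = size_in_bits % kBlockBits;
      uint32_t hash = 0;
      for (size_t i = 0; i < num_full_blocks; ++i) {
        uint32_t block;
        std::memcpy(&block, data + i * (kBlockBits / kBitsPerByte), sizeof(block));
        hash = hash * 31u + block;  // placeholder mix, not Murmur3
      }
      if (num_end_bits != 0) {
        // Load only the bytes holding the trailing bits, then mask the rest off
        // (little-endian layout assumed for this partial load).
        uint32_t end_bits = 0;
        size_t num_end_bytes = (num_end_bits + kBitsPerByte - 1) / kBitsPerByte;
        std::memcpy(&end_bits, data + num_full_blocks * (kBlockBits / kBitsPerByte), num_end_bytes);
        end_bits &= (1u << num_end_bits) - 1u;
        hash = hash * 31u + end_bits;
      }
      return hash;
    }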
|
D | bit_memory_region.h |
      47  ALWAYS_INLINE explicit BitMemoryRegion(MemoryRegion region)  in BitMemoryRegion() argument
      48  : BitMemoryRegion(region.begin(), /* bit_start */ 0, region.size_in_bits()) {  in BitMemoryRegion()
      50  ALWAYS_INLINE BitMemoryRegion(MemoryRegion region, size_t bit_offset, size_t bit_length)  in BitMemoryRegion() argument
      51  : BitMemoryRegion(region) {  in BitMemoryRegion()
     460  BitMemoryRegion region(out_->data(), bit_offset_, bit_length);  in Allocate()
     463  return region;  in Allocate()
     466  ALWAYS_INLINE void WriteRegion(const BitMemoryRegion& region) {  in WriteRegion() argument
     467  Allocate(region.size_in_bits()).CopyBits(region);  in WriteRegion()
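
As the constructors at lines 47-51 show, BitMemoryRegion wraps a byte buffer together with a bit offset and a bit length. Below is a hypothetical, reduced bit-view to illustrate the idea; the LSB-first bit order and the BitView name are assumptions, and the real class additionally supports stores, multi-bit loads, subregions, and copies.

    // Reduced stand-in for a bit-granularity view over bytes; not the ART class.
    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    struct BitView {
      const uint8_t* data;
      size_t bit_start;   // first valid bit, counted from data[0] bit 0
      size_t bit_length;  // number of valid bits in the view

      bool LoadBit(size_t bit_offset) const {
        assert(bit_offset < bit_length);
        size_t bit = bit_start + bit_offset;
        return (data[bit / 8] >> (bit % 8)) & 1u;  // LSB-first order assumed
      }
    };

    int main() {
      uint8_t bytes[] = {0b10100101, 0b00001111};
      BitView view{bytes, /*bit_start=*/4, /*bit_length=*/8};  // view over bits 4..11
      assert(view.LoadBit(0) == ((bytes[0] >> 4) & 1));
      return 0;
    }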
|
D | bit_table.h |
     417  MemoryRegion region(const_cast<void*>(bitmap), BitsToBytesRoundUp(num_bits));  in Dedup()
     418  DCHECK(num_bits == 0 || BitMemoryRegion(region).LoadBit(num_bits - 1) == 1);  in Dedup()
     419  DCHECK_EQ(BitMemoryRegion(region).LoadBits(num_bits, region.size_in_bits() - num_bits), 0u);  in Dedup()
     421  uint32_t hash = hasher(region);  in Dedup()
     426  if (MemoryRegion::ContentEquals()(region, rows_[it->second])) {  in Dedup()
     433  void* copy = allocator_->Alloc(region.size(), kArenaAllocBitTableBuilder);  in Dedup()
     434  memcpy(copy, region.pointer(), region.size());  in Dedup()
     435  rows_.push_back(MemoryRegion(copy, region.size()));  in Dedup()
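
The Dedup() hits follow the usual dedup-by-hash pattern: hash the region, probe rows that already carry that hash, confirm with a content comparison, and only then copy the bytes and append a new row. A generic sketch of that pattern follows; the container choices, the FNV placeholder hash, and the Deduper name are illustrative, not BitTableBuilder internals.

    // Hash-then-compare dedup sketch in the spirit of the Dedup() hits above.
    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <unordered_map>
    #include <vector>

    using Row = std::vector<uint8_t>;

    class Deduper {
     public:
      // Returns the index of an existing identical row, or appends a copy.
      uint32_t Dedup(const uint8_t* data, size_t size) {
        uint32_t hash = Hash(data, size);
        auto range = index_.equal_range(hash);
        for (auto it = range.first; it != range.second; ++it) {
          const Row& row = rows_[it->second];
          if (row.size() == size && std::memcmp(row.data(), data, size) == 0) {
            return it->second;  // content matches: reuse the existing row
          }
        }
        rows_.emplace_back(data, data + size);  // no match: store a copy
        uint32_t index = static_cast<uint32_t>(rows_.size() - 1);
        index_.emplace(hash, index);
        return index;
      }

     private:
      static uint32_t Hash(const uint8_t* data, size_t size) {
        uint32_t h = 2166136261u;  // FNV-1a as a placeholder hash
        for (size_t i = 0; i < size; ++i) {
          h = (h ^ data[i]) * 16777619u;
        }
        return h;
      }

      std::vector<Row> rows_;
      std::unordered_multimap<uint32_t, uint32_t> index_;
    };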
|
D | memory_region.h |
     129  ALWAYS_INLINE void Extend(const MemoryRegion& region, uintptr_t extra) {  in Extend() argument
     130  pointer_ = region.pointer();  in Extend()
     131  size_ = (region.size() + extra);  in Extend()
|
/art/runtime/jit/ |
D | jit_scoped_code_cache_write.h |
      38  explicit ScopedCodeCacheWrite(const JitMemoryRegion& region)  in ScopedCodeCacheWrite() argument
      40  region_(region) {  in ScopedCodeCacheWrite()
      41  if (kIsDebugBuild || !region.HasDualCodeMapping()) {  in ScopedCodeCacheWrite()
      43  const MemMap* const updatable_pages = region.GetUpdatableCodeMapping();  in ScopedCodeCacheWrite()
      45  int prot = region.HasDualCodeMapping() ? kProtRW : kProtRWX;  in ScopedCodeCacheWrite()
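
ScopedCodeCacheWrite is an RAII guard that makes the JIT region's updatable code mapping writable for the duration of a scope. The sketch below shows the same pattern with plain POSIX mprotect(); the ScopedWritable name, the chosen protections, and the page-aligned start requirement are assumptions for illustration, not the ART implementation.

    // Generic RAII protection toggle in the spirit of ScopedCodeCacheWrite.
    #include <sys/mman.h>
    #include <cstddef>

    class ScopedWritable {
     public:
      // `start` must be page-aligned for mprotect().
      ScopedWritable(void* start, size_t size, int restore_prot = PROT_READ | PROT_EXEC)
          : start_(start), size_(size), restore_prot_(restore_prot) {
        mprotect(start_, size_, PROT_READ | PROT_WRITE);  // open the range for writing
      }
      ~ScopedWritable() {
        mprotect(start_, size_, restore_prot_);  // drop write access on scope exit
      }
      ScopedWritable(const ScopedWritable&) = delete;
      ScopedWritable& operator=(const ScopedWritable&) = delete;

     private:
      void* start_;
      size_t size_;
      int restore_prot_;
    };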
|
D | jit_code_cache.h |
     118  explicit ZygoteMap(JitMemoryRegion* region)  in ZygoteMap() argument
     119  : map_(), region_(region), compilation_state_(nullptr) {}  in ZygoteMap()
     251  JitMemoryRegion* region,
     269  JitMemoryRegion* region,
     284  void Free(Thread* self, JitMemoryRegion* region, const uint8_t* code, const uint8_t* data)
     287  void FreeLocked(JitMemoryRegion* region, const uint8_t* code, const uint8_t* data)
     402  bool IsSharedRegion(const JitMemoryRegion& region) const { return &region == &shared_region_; }  in IsSharedRegion() argument
     407  JitMemoryRegion* region = GetCurrentRegion();  in CanAllocateProfilingInfo() local
     408  return region->IsValid() && !IsSharedRegion(*region);  in CanAllocateProfilingInfo()
|
D | jit_code_cache.cc |
     227  JitMemoryRegion region;  in Create() local
     228  if (!region.Initialize(initial_capacity,  in Create()
     236  if (region.HasCodeMapping()) {  in Create()
     237  const MemMap* exec_pages = region.GetExecPages();  in Create()
     245  jit_code_cache->shared_region_ = std::move(region);  in Create()
     247  jit_code_cache->private_region_ = std::move(region);  in Create()
     653  JitMemoryRegion* region,  in Commit() argument
     669  DCheckRootsAreValid(roots, IsSharedRegion(*region));  in Commit()
     679  const uint8_t* code_ptr = region->CommitCode(reserved_code, code, stack_map_data);  in Commit()
     686  if (!region->CommitData(reserved_data, roots, stack_map)) {  in Commit()
     [all …]
|
/art/test/1000-non-moving-space-stress/ |
D | info.txt |
       4  reference to an object cleared or moved from a newly allocated region
       5  of the region space.
|
/art/runtime/gc/space/ |
D | region_space-inl.h |
     329  mirror::Object* region = nullptr;  in AllocLarge() local
     334  region = AllocLargeInRange<kForEvac>(cyclic_alloc_region_index_,  in AllocLarge()
     342  if (region == nullptr) {  in AllocLarge()
     346  region = AllocLargeInRange<kForEvac>(  in AllocLarge()
     356  if (region != nullptr) {  in AllocLarge()
     365  region = AllocLargeInRange<kForEvac>(0,  in AllocLarge()
     372  if (kForEvac && region != nullptr) {  in AllocLarge()
     375  return region;  in AllocLarge()
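
The AllocLarge() hits suggest a cyclic search for a run of contiguous free regions: first from cyclic_alloc_region_index_ towards the end of the region table, then wrapping around to the front. The sketch below captures only that strategy over a plain boolean vector; the exact range bounds and the helper names are assumptions, not the RegionSpace bookkeeping.

    // Sketch of a cyclic search for a contiguous run of free slots.
    #include <cstddef>
    #include <vector>

    // Returns the first index of a run of `num_regions` free slots in
    // [begin, end), or -1 if none exists.
    ptrdiff_t FindRunInRange(const std::vector<bool>& free, size_t begin, size_t end,
                             size_t num_regions) {
      size_t run = 0;
      for (size_t i = begin; i < end; ++i) {
        run = free[i] ? run + 1 : 0;
        if (run == num_regions) {
          return static_cast<ptrdiff_t>(i + 1 - num_regions);
        }
      }
      return -1;
    }

    ptrdiff_t AllocLargeCyclic(const std::vector<bool>& free, size_t cyclic_start,
                               size_t num_regions) {
      // First try [cyclic_start, size), mirroring the first AllocLargeInRange call.
      ptrdiff_t found = FindRunInRange(free, cyclic_start, free.size(), num_regions);
      if (found < 0) {
        // Then wrap around and search from the front, as the fallback calls do.
        found = FindRunInRange(free, 0, cyclic_start, num_regions);
      }
      return found;
    }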
|
D | region_space.cc |
     294  Region* region = RefToRegionLocked(reinterpret_cast<mirror::Object*>(addr));  in ZeroLiveBytesForLargeObject() local
     296  DCHECK(region->IsLarge());  in ZeroLiveBytesForLargeObject()
     298  DCHECK(region->IsLargeTail());  in ZeroLiveBytesForLargeObject()
     300  region->ZeroLiveBytes();  in ZeroLiveBytesForLargeObject()
|
/art/compiler/utils/ |
D | assembler.h |
      50  virtual void Process(const MemoryRegion& region, int position) = 0;
     167  void CopyInstructions(const MemoryRegion& region);
     273  void ProcessFixups(const MemoryRegion& region);
     382  virtual void CopyInstructions(const MemoryRegion& region) {  in CopyInstructions() argument
     383  buffer_.CopyInstructions(region);  in CopyInstructions()
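
AssemblerFixup::Process(region, position) is called for every recorded fixup once the instructions have been copied to their final location (see the ProcessFixups hit in assembler.cc below). A reduced stand-in of that pattern follows; the Fixup and Buffer types and the 32-bit patch are invented for illustration, not the ART classes.

    // Minimal fixup-patching sketch: copy code, then let each fixup patch bytes
    // at its recorded position.
    #include <cstdint>
    #include <cstring>
    #include <memory>
    #include <vector>

    struct Fixup {
      int position = 0;  // byte offset of the spot to patch
      virtual ~Fixup() = default;
      virtual void Process(uint8_t* code, int pos) = 0;
    };

    // Example fixup: overwrite a 32-bit placeholder with a resolved value.
    struct AbsoluteFixup : Fixup {
      uint32_t resolved_value = 0;
      void Process(uint8_t* code, int pos) override {
        std::memcpy(code + pos, &resolved_value, sizeof(resolved_value));
      }
    };

    struct Buffer {
      std::vector<uint8_t> bytes;
      std::vector<std::unique_ptr<Fixup>> fixups;

      void ProcessFixups(uint8_t* final_code) {
        for (auto& fixup : fixups) {
          fixup->Process(final_code, fixup->position);
        }
      }
    };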
|
D | assembler.cc |
      54  void AssemblerBuffer::ProcessFixups(const MemoryRegion& region) {  in ProcessFixups() argument
      57  fixup->Process(region, fixup->position());  in ProcessFixups()
|
D | jni_macro_assembler.h |
      95  virtual void CopyInstructions(const MemoryRegion& region) = 0;
     289  void CopyInstructions(const MemoryRegion& region) override {  in CopyInstructions() argument
     290  asm_.CopyInstructions(region);  in CopyInstructions()
|
/art/test/659-unpadded-array/ |
D | info.txt | 1 Regression test for the concurrent GC whose region space had
|
/art/test/090-loop-formation/ |
D | info.txt | 3 in the loop region, and the JIT compiler won't choke on unresolved fields.
|
/art/compiler/jit/ |
D | jit_compiler.h | 45 Thread* self, JitMemoryRegion* region, ArtMethod* method, CompilationKind kind)
|
D | jit_compiler.cc |
     177  Thread* self, JitMemoryRegion* region, ArtMethod* method, CompilationKind compilation_kind) {  in CompileMethod() argument
     204  self, code_cache, region, method, compilation_kind, jit_logger_.get());  in CompileMethod()
|
/art/runtime/oat/ |
D | stack_map.cc |
     171  CodeInfo code_info(code_info_data, &num_bits, [&](size_t i, auto* table, BitMemoryRegion region) {  in CollectSizeStats() argument
     174  table_stats.AddBits(region.size_in_bits());  in CollectSizeStats()
     175  table_stats["Header"].AddBits(region.size_in_bits() - table->DataBitSize());  in CollectSizeStats()
|
/art/compiler/utils/arm64/ |
D | assembler_arm64.cc |
      82  void Arm64Assembler::CopyInstructions(const MemoryRegion& region) {  in CopyInstructions() argument
      85  region.CopyFrom(0, from);  in CopyInstructions()
|
D | assembler_arm64.h | 94 void CopyInstructions(const MemoryRegion& region) override;
|
/art/compiler/ |
D | compiler.h | 72 [[maybe_unused]] jit::JitMemoryRegion* region, in JitCompile() argument
|
/art/compiler/optimizing/ |
D | optimizing_compiler.cc |
     279  jit::JitMemoryRegion* region,
    1260  jit::JitMemoryRegion* region,  in JitCompile() argument
    1266  DCHECK_EQ(compiler_options.IsJitCompilerForSharedCode(), code_cache->IsSharedRegion(*region));  in JitCompile()
    1313  region,  in JitCompile()
    1350  region,  in JitCompile()
    1361  code_cache->Free(self, region, reserved_code.data(), reserved_data.data());  in JitCompile()
    1409  region,  in JitCompile()
    1462  region,  in JitCompile()
    1475  code_cache->Free(self, region, reserved_code.data(), reserved_data.data());  in JitCompile()
|
/art/dex2oat/linker/ |
D | code_info_table_deduper.cc | 87 BitMemoryRegion region( in Dedupe() local
|
/art/compiler/utils/arm/ |
D | assembler_arm_vixl.cc |
      55  void ArmVIXLAssembler::CopyInstructions(const MemoryRegion& region) {  in CopyInstructions() argument
      58  region.CopyFrom(0, from);  in CopyInstructions()
|