Lines Matching refs:alloc_size

1939 size_t alloc_size, in AllocateInternalWithGc() argument
1957 l->PreObjectAllocated(self, h_klass, &alloc_size); in AllocateInternalWithGc()
1982 mirror::Object* ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated, in AllocateInternalWithGc()
1992 alloc_size, in AllocateInternalWithGc()
2029 alloc_size, bytes_allocated, in AllocateInternalWithGc()
2040 VLOG(gc) << "Forcing collection of SoftReferences for " << PrettySize(alloc_size) in AllocateInternalWithGc()
2050 static_cast<int64_t>(kMinFreedHeapAfterGcForAlloc * growth_limit_ + alloc_size); in AllocateInternalWithGc()
2098 self, allocator, alloc_size, bytes_allocated, usable_size, bytes_tl_bulk_allocated); in AllocateInternalWithGc()
2116 ThrowOutOfMemoryError(self, alloc_size, allocator); in AllocateInternalWithGc()
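
Note: lines 1982-2116 are the slow allocation path. TryToAllocate() is retried after each collection, SoftReferences are only cleared as a last resort (line 2040), and line 2050 gates that final GC on whether it could plausibly free kMinFreedHeapAfterGcForAlloc * growth_limit_ + alloc_size bytes; if even that fails, line 2116 throws OutOfMemoryError. A minimal sketch of the retry ladder, assuming hypothetical helpers TryAllocate() and RunGc() (the real code also threads allocator types and instrumentation callbacks through):

    #include <cstddef>
    #include <cstdio>
    #include <cstdlib>

    // Stand-in state so the sketch runs: the first GC "frees" memory.
    static bool g_freed = false;
    static void* TryAllocate(size_t n) { return g_freed ? std::malloc(n) : nullptr; }
    static void RunGc(bool clear_soft_references) {
      (void)clear_soft_references;
      g_freed = true;
    }

    void* AllocateWithGcSketch(size_t alloc_size) {
      // Progressively more aggressive collections, retrying after each.
      for (int attempt = 0; attempt < 3; ++attempt) {
        if (void* ptr = TryAllocate(alloc_size)) return ptr;
        RunGc(/*clear_soft_references=*/false);
      }
      // Last resort before OOME, cf. the VLOG on line 2040.
      std::printf("Forcing collection of SoftReferences for %zu bytes\n", alloc_size);
      RunGc(/*clear_soft_references=*/true);
      return TryAllocate(alloc_size);  // nullptr -> ThrowOutOfMemoryError (line 2116)
    }

    int main() {
      void* p = AllocateWithGcSketch(64);
      bool ok = p != nullptr;
      std::free(p);
      return ok ? 0 : 1;
    }
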
2409 size_t alloc_size = RoundUp(obj_size, kObjectAlignment); in MarkNonForwardedObject() local
2412 auto it = bins_.lower_bound(alloc_size); in MarkNonForwardedObject()
2418 self_, alloc_size, &bytes_allocated, nullptr, &unused_bytes_tl_bulk_allocated); in MarkNonForwardedObject()
2433 DCHECK_GE(size, alloc_size); in MarkNonForwardedObject()
2435 AddBin(size - alloc_size, pos + alloc_size); in MarkNonForwardedObject()
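
Note: lines 2409-2435 are the bin-packing reuse in the compacting collector: alloc_size is the object size rounded up to kObjectAlignment (2409), bins_ maps free-slot sizes to addresses, lower_bound() picks the smallest slot that fits (2412), and the unused tail is re-added as a smaller bin (2435). A sketch of that structure, assuming bins_ is a std::multimap and eliding the bump-pointer fallback:

    #include <cstddef>
    #include <cstdint>
    #include <map>

    static std::multimap<size_t, uintptr_t> bins_;  // free-slot size -> start address

    static void AddBin(size_t size, uintptr_t pos) {
      if (size != 0) bins_.emplace(size, pos);
    }

    // Returns a fitting slot address, or 0 if no bin is large enough.
    static uintptr_t TakeBin(size_t alloc_size) {
      auto it = bins_.lower_bound(alloc_size);  // smallest bin with size >= alloc_size
      if (it == bins_.end()) return 0;
      size_t size = it->first;                  // size >= alloc_size, cf. DCHECK_GE (2433)
      uintptr_t pos = it->second;
      bins_.erase(it);
      AddBin(size - alloc_size, pos + alloc_size);  // recycle the leftover tail
      return pos;
    }

    int main() {
      AddBin(64, 0x1000);
      return TakeBin(24) == 0x1000 ? 0 : 1;  // leaves a 40-byte bin at 0x1018
    }
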
4425 void Heap::JHPCheckNonTlabSampleAllocation(Thread* self, mirror::Object* obj, size_t alloc_size) { in JHPCheckNonTlabSampleAllocation() argument
4436 alloc_size, self->GetTlabPosOffset(), &take_sample, &bytes_until_sample); in JHPCheckNonTlabSampleAllocation()
4439 prof_heap_sampler.ReportSample(obj, alloc_size); in JHPCheckNonTlabSampleAllocation()
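
Note: JHPCheckNonTlabSampleAllocation() asks the heap sampler whether this non-TLAB allocation should be sampled and, if so, reports it (4439). A sketch of the decision as a plain bytes-until-sample countdown; this is an assumption about the mechanism, and the real HeapSampler also folds in the TLAB position (GetTlabPosOffset, line 4436):

    #include <cstddef>

    struct HeapSamplerSketch {
      size_t interval = 512 * 1024;  // assumed sampling period in bytes
      size_t bytes_until_sample = 512 * 1024;

      // True when this allocation crosses the sample point.
      bool ShouldSample(size_t alloc_size) {
        if (alloc_size >= bytes_until_sample) {
          bytes_until_sample = interval;  // re-arm for the next sample
          return true;
        }
        bytes_until_sample -= alloc_size;
        return false;
      }
    };

    int main() {
      HeapSamplerSketch s;
      s.bytes_until_sample = 100;
      return (!s.ShouldSample(60) && s.ShouldSample(60)) ? 0 : 1;  // second call crosses
    }
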
4446 size_t alloc_size, in JHPCalculateNextTlabSize() argument
4450 alloc_size, self->GetTlabPosOffset(), take_sample, bytes_until_sample); in JHPCalculateNextTlabSize()
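
Note: JHPCalculateNextTlabSize() feeds the sampler's state into TLAB sizing. A sketch of the presumed intent, namely capping the next TLAB at bytes_until_sample so the fast path runs out exactly at the sample point and the slow path can record the sample (the function name and parameters here are assumptions):

    #include <algorithm>
    #include <cstddef>

    size_t NextTlabSizeSketch(size_t default_tlab_size,
                              size_t bytes_until_sample,
                              bool sampling_enabled) {
      if (sampling_enabled) {
        // Force a slow-path visit at the sample point.
        return std::min(default_tlab_size, bytes_until_sample);
      }
      return default_tlab_size;
    }
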
4542 size_t alloc_size, in AllocWithNewTLAB() argument
4552 if (kUsePartialTlabs && alloc_size <= self->TlabRemainingCapacity()) { in AllocWithNewTLAB()
4553 DCHECK_GT(alloc_size, self->TlabSize()); in AllocWithNewTLAB()
4556 const size_t min_expand_size = alloc_size - self->TlabSize(); in AllocWithNewTLAB()
4559 self, kPartialTlabSize, alloc_size, &take_sample, &bytes_until_sample) : in AllocWithNewTLAB()
4569 DCHECK_LE(alloc_size, self->TlabSize()); in AllocWithNewTLAB()
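
Note: lines 4552-4569 are the partial-TLAB path: if the mapped TLAB region still has spare capacity past the current limit (TlabRemainingCapacity), the limit is pushed out by at least alloc_size - TlabSize() rather than discarding the TLAB. A sketch with assumed field names:

    #include <cstddef>
    #include <cstdint>

    struct PartialTlabSketch {
      uint8_t* pos;    // next free byte
      uint8_t* limit;  // current end of the usable TLAB
      uint8_t* cap;    // hard end of the mapped region

      size_t TlabSize() const { return static_cast<size_t>(limit - pos); }
      size_t RemainingCapacity() const { return static_cast<size_t>(cap - pos); }

      // Caller has already checked alloc_size > TlabSize(), cf. DCHECK_GT (4553).
      bool TryExpand(size_t alloc_size) {
        if (alloc_size > RemainingCapacity()) return false;
        size_t min_expand_size = alloc_size - TlabSize();  // line 4556
        limit += min_expand_size;  // the real code may expand further than this minimum
        return true;
      }
    };
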
4576 size_t next_tlab_size = RoundDown(alloc_size + kDefaultTLABSize, gPageSize) - alloc_size; in AllocWithNewTLAB()
4579 self, next_tlab_size, alloc_size, &take_sample, &bytes_until_sample); in AllocWithNewTLAB()
4581 const size_t new_tlab_size = alloc_size + next_tlab_size; in AllocWithNewTLAB()
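
Note: the arithmetic on lines 4576-4581 rounds the total request down to whole pages: the object takes alloc_size bytes and the remainder becomes the new TLAB, so alloc_size + next_tlab_size is always a multiple of gPageSize. A worked sketch with stand-ins for kDefaultTLABSize and gPageSize:

    #include <cassert>
    #include <cstddef>

    constexpr size_t kDefaultTLABSizeSketch = 32 * 1024;  // assumed default
    constexpr size_t kPageSizeSketch = 4096;              // stand-in for gPageSize

    constexpr size_t RoundDown(size_t x, size_t n) { return x & ~(n - 1); }

    size_t NewTlabSize(size_t alloc_size) {
      size_t next_tlab_size =
          RoundDown(alloc_size + kDefaultTLABSizeSketch, kPageSizeSketch) - alloc_size;
      size_t new_tlab_size = alloc_size + next_tlab_size;  // line 4581
      assert(new_tlab_size % kPageSizeSketch == 0);        // whole pages requested
      return new_tlab_size;
    }

    int main() {
      // alloc_size = 100: RoundDown(100 + 32768, 4096) = 32768 total bytes,
      // of which 100 go to the object and 32668 become the new TLAB.
      return NewTlabSize(100) == 32768 ? 0 : 1;
    }
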
4596 if (space::RegionSpace::kRegionSize >= alloc_size) { in AllocWithNewTLAB()
4605 self, next_pr_tlab_size, alloc_size, &take_sample, &bytes_until_sample); in AllocWithNewTLAB()
4608 ? std::max(alloc_size, next_pr_tlab_size) in AllocWithNewTLAB()
4613 ret = region_space_->AllocNonvirtual<false>(alloc_size, in AllocWithNewTLAB()
4618 JHPCheckNonTlabSampleAllocation(self, ret, alloc_size); in AllocWithNewTLAB()
4625 if (!IsOutOfMemoryOnAllocation(allocator_type, alloc_size, grow)) { in AllocWithNewTLAB()
4626 ret = region_space_->AllocNonvirtual<false>(alloc_size, in AllocWithNewTLAB()
4631 JHPCheckNonTlabSampleAllocation(self, ret, alloc_size); in AllocWithNewTLAB()
4640 if (LIKELY(!IsOutOfMemoryOnAllocation(allocator_type, alloc_size, grow))) { in AllocWithNewTLAB()
4641 ret = region_space_->AllocNonvirtual<false>(alloc_size, in AllocWithNewTLAB()
4646 JHPCheckNonTlabSampleAllocation(self, ret, alloc_size); in AllocWithNewTLAB()
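
Note: lines 4596-4646 are the region-space branch. A TLAB never spans regions, so only requests up to RegionSpace::kRegionSize get a TLAB, sized max(alloc_size, next_pr_tlab_size) (4608); larger requests are allocated directly and checked against the OOM limit on their own (4625, 4640). Sketched with an assumed region size:

    #include <algorithm>
    #include <cstddef>

    constexpr size_t kRegionSizeSketch = 256 * 1024;  // stand-in for RegionSpace::kRegionSize

    // Returns the TLAB bytes to request, or 0 for a direct (large) allocation.
    size_t PickTlabRequest(size_t alloc_size, size_t next_pr_tlab_size) {
      if (alloc_size <= kRegionSizeSketch) {
        return std::max(alloc_size, next_pr_tlab_size);  // line 4608
      }
      return 0;  // bypass the TLAB; allocate and OOM-check individually
    }
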
4654 ret = self->AllocTlab(alloc_size); in AllocWithNewTLAB()
4656 *bytes_allocated = alloc_size; in AllocWithNewTLAB()
4657 *usable_size = alloc_size; in AllocWithNewTLAB()
4664 GetHeapSampler().ReportSample(ret, alloc_size); in AllocWithNewTLAB()
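
Note: once a TLAB is installed, the allocation itself (4654) is a plain bump of a thread-local cursor, and both *bytes_allocated and *usable_size come back as alloc_size (4656-4657). A bump-pointer sketch with assumed field names:

    #include <cstddef>
    #include <cstdint>

    struct TlabSketch {
      uint8_t* pos = nullptr;
      uint8_t* end = nullptr;

      void* AllocTlab(size_t alloc_size) {
        if (pos + alloc_size > end) return nullptr;  // TLAB exhausted
        void* ret = pos;
        pos += alloc_size;  // thread-local bump; no synchronization needed
        return ret;
      }
    };
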
4678 void Heap::VlogHeapGrowth(size_t old_footprint, size_t new_footprint, size_t alloc_size) { in VlogHeapGrowth() argument
4680 << PrettySize(new_footprint) << " for a " << PrettySize(alloc_size) << " allocation"; in VlogHeapGrowth()