Commit de5d1200 authored by Igor Sheludko, committed by Commit Bot

[ptr-compr] Re-implement BoundedPageAllocator::ReleasePages()

Trimming may free up some allocatable pages that can be reused by subsequent
allocations.

This CL also fixes base::AddressRegion::contains(Address, size_t).

Bug: v8:8096
Change-Id: I3b7381fd32f7dbf186dffc1a26d5a88cd8a30d2f
Reviewed-on: https://chromium-review.googlesource.com/1249127
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#56284}
parent f4060f25
@@ -38,7 +38,7 @@ class AddressRegion {
   bool contains(Address address, size_t size) const {
     STATIC_ASSERT(std::is_unsigned<Address>::value);
     Address offset = address - begin();
-    return (offset < size_) && (offset <= size_ - size);
+    return (offset < size_) && (offset + size <= size_);
   }

   bool contains(AddressRegion region) const {
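Why the fix matters: both operands are unsigned, so when |size| exceeds the region's size, the old expression size_ - size wraps around to a huge value and the bounds check spuriously passes. A standalone sketch of the failure mode (plain C++ mirroring the predicate; the helper names and constants are illustrative, not V8 code):

#include <cassert>
#include <cstdint>

using Address = uintptr_t;

// The old predicate: underflows when size > region_size.
bool ContainsOld(Address begin, size_t region_size, Address address,
                 size_t size) {
  Address offset = address - begin;
  return (offset < region_size) && (offset <= region_size - size);
}

// The fixed predicate from this CL.
bool ContainsNew(Address begin, size_t region_size, Address address,
                 size_t size) {
  Address offset = address - begin;
  return (offset < region_size) && (offset + size <= region_size);
}

int main() {
  const Address kBegin = 0x1000;
  const size_t kRegionSize = 0x100;
  // Query one byte longer than the whole region: must not be contained.
  assert(ContainsOld(kBegin, kRegionSize, kBegin, kRegionSize + 1));   // old: wrongly true
  assert(!ContainsNew(kBegin, kRegionSize, kBegin, kRegionSize + 1));  // new: correctly false
}

This is exactly the case the new CHECK(!region.contains(start, size + 1)) unit test below exercises.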
@@ -53,17 +53,29 @@ bool BoundedPageAllocator::FreePages(void* raw_address, size_t size) {
 bool BoundedPageAllocator::ReleasePages(void* raw_address, size_t size,
                                         size_t new_size) {
   Address address = reinterpret_cast<Address>(raw_address);
   CHECK(IsAligned(address, allocate_page_size_));

+  DCHECK_LT(new_size, size);
+  DCHECK(IsAligned(size - new_size, commit_page_size_));
+
+  // Check if we freed any allocatable pages by this release.
+  size_t allocated_size = RoundUp(size, allocate_page_size_);
+  size_t new_allocated_size = RoundUp(new_size, allocate_page_size_);
+
 #ifdef DEBUG
   {
-    CHECK_LT(new_size, size);
-    CHECK(IsAligned(size - new_size, commit_page_size_));
     // There must be an allocated region at given |address| of a size not
     // smaller than |size|.
     LockGuard<Mutex> guard(&mutex_);
-    size_t used_region_size = region_allocator_.CheckRegion(address);
-    CHECK_LE(size, used_region_size);
+    CHECK_EQ(allocated_size, region_allocator_.CheckRegion(address));
   }
 #endif
+
+  if (new_allocated_size < allocated_size) {
+    LockGuard<Mutex> guard(&mutex_);
+    region_allocator_.TrimRegion(address, new_allocated_size);
+  }
+
   // Keep the region in "used" state just uncommit some pages.
   Address free_address = address + new_size;
   size_t free_size = size - new_size;
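The two granularities are the crux here: a BoundedPageAllocator hands out memory in allocate_page_size_ granules but commits it in smaller commit_page_size_ pages, so a release only returns address space to the region allocator when the shrink crosses an allocatable-page boundary. A minimal sketch of that decision (standalone C++; the page sizes are hypothetical and RoundUp is a local helper rather than V8's):

#include <cstddef>
#include <cstdio>

// Hypothetical granularities: 64 KB allocation granules, 4 KB commit pages.
constexpr size_t kAllocatePageSize = 64 * 1024;
constexpr size_t kCommitPageSize = 4 * 1024;

// Round |x| up to a multiple of |granule| (a power of two).
constexpr size_t RoundUp(size_t x, size_t granule) {
  return (x + granule - 1) & ~(granule - 1);
}

int main() {
  // Shrink a 3-granule region down to one granule plus one committed page.
  size_t size = 3 * kAllocatePageSize;
  size_t new_size = kAllocatePageSize + kCommitPageSize;

  size_t allocated_size = RoundUp(size, kAllocatePageSize);          // 3 granules
  size_t new_allocated_size = RoundUp(new_size, kAllocatePageSize);  // 2 granules

  if (new_allocated_size < allocated_size) {
    // One whole granule became allocatable again; this is the case in which
    // ReleasePages() above calls region_allocator_.TrimRegion().
    printf("returning %zu bytes to the region allocator\n",
           allocated_size - new_allocated_size);
  }
  // The remaining size - new_size bytes are merely uncommitted, while the
  // region itself stays in "used" state.
}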
@@ -81,6 +81,7 @@ void RegionAllocator::FreeListRemoveRegion(Region* region) {
 RegionAllocator::Region* RegionAllocator::Split(Region* region,
                                                 size_t new_size) {
   DCHECK(IsAligned(new_size, page_size_));
+  DCHECK_NE(new_size, 0);
   DCHECK_GT(region->size(), new_size);

   // Create new region and put it to the lists after the |region|.
@@ -193,7 +194,9 @@ bool RegionAllocator::AllocateRegionAt(Address requested_address, size_t size) {
   return true;
 }

-size_t RegionAllocator::FreeRegion(Address address) {
+size_t RegionAllocator::TrimRegion(Address address, size_t new_size) {
+  DCHECK(IsAligned(new_size, page_size_));
+
   AllRegionsSet::iterator region_iter = FindRegion(address);
   if (region_iter == all_regions_.end()) {
     return 0;
@@ -203,10 +206,14 @@ size_t RegionAllocator::FreeRegion(Address address) {
     return 0;
   }

-  size_t size = region->size();
   // The region must not be in the free list.
   DCHECK_EQ(free_regions_.find(*region_iter), free_regions_.end());

+  if (new_size > 0) {
+    region = Split(region, new_size);
+    ++region_iter;
+  }
+  size_t size = region->size();
   region->set_is_used(false);

   // Merge current region with the surrounding ones if they are free.
......@@ -221,7 +228,7 @@ size_t RegionAllocator::FreeRegion(Address address) {
Merge(region_iter, next_iter);
}
}
if (region->begin() != whole_region_.begin()) {
if (new_size == 0 && region->begin() != whole_region_.begin()) {
// There must be a range before the current one.
AllRegionsSet::iterator prev_iter = std::prev(region_iter);
DCHECK_NE(prev_iter, all_regions_.end());
......
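Taken together, the region-allocator changes make TrimRegion() a generalization of the old FreeRegion(): split the used region at |new_size|, mark the tail free, and coalesce it with free neighbors. The new_size == 0 guard above is the one asymmetry: after a trim, the area to the left is the still-used head of the same region, so only a full free may merge with its predecessor. A toy model of this bookkeeping (hypothetical ToyRegions type, assuming page-aligned inputs; V8's RegionAllocator additionally maintains a size-keyed free list for allocation):

#include <cassert>
#include <cstdint>
#include <iterator>
#include <map>

struct ToyRegions {
  struct Region { size_t size; bool used; };
  std::map<uintptr_t, Region> regions;  // keyed by begin address

  // Shrinks the used region at |address| to |new_size| bytes (0 frees it);
  // returns the number of bytes freed, or 0 if there is no such region.
  size_t Trim(uintptr_t address, size_t new_size) {
    auto it = regions.find(address);
    if (it == regions.end() || !it->second.used) return 0;
    if (new_size > 0) {
      // Split: the leading |new_size| bytes stay used.
      size_t tail_size = it->second.size - new_size;
      it->second.size = new_size;
      it = regions.emplace(address + new_size, Region{tail_size, true}).first;
    }
    it->second.used = false;
    size_t freed = it->second.size;
    // Merge with a free successor.
    auto next = std::next(it);
    if (next != regions.end() && !next->second.used) {
      it->second.size += next->second.size;
      regions.erase(next);
    }
    // Only a full free may merge with the predecessor; after a trim the
    // predecessor is the still-used head of the original region.
    if (new_size == 0 && it != regions.begin()) {
      auto prev = std::prev(it);
      if (!prev->second.used) {
        prev->second.size += it->second.size;
        regions.erase(it);
      }
    }
    return freed;
  }
};

int main() {
  ToyRegions ra;
  ra.regions[0x0000] = {0x4000, true};   // one used 4-page region
  ra.regions[0x4000] = {0xC000, false};  // free tail of the address space
  assert(ra.Trim(0x0000, 0x1000) == 0x3000);     // keep one page, free three
  assert(ra.regions.at(0x1000).size == 0xF000);  // tail coalesced with free space
}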
@@ -48,7 +48,13 @@ class V8_BASE_EXPORT RegionAllocator final {
   // Frees region at given |address|, returns the size of the region.
   // There must be a used region starting at given address otherwise nothing
   // will be freed and 0 will be returned.
-  size_t FreeRegion(Address address);
+  size_t FreeRegion(Address address) { return TrimRegion(address, 0); }
+
+  // Decreases size of the previously allocated region at |address|, returns
+  // freed size. |new_size| must be |page_size|-aligned and
+  // less than or equal to current region's size. Setting new size to zero
+  // frees the region.
+  size_t TrimRegion(Address address, size_t new_size);

   // If there is a used region starting at given address returns its size
   // otherwise 0.
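A short usage sketch of the documented contract (it follows the calling convention visible in the new unit test below and only compiles inside V8; kBegin and kPageSize are assumed to be set up as in that test):

RegionAllocator ra(kBegin, 16 * kPageSize, kPageSize);
CHECK(ra.AllocateRegionAt(kBegin, 4 * kPageSize));
// Shrink the 4-page region to a single page; three pages return to the pool.
CHECK_EQ(ra.TrimRegion(kBegin, kPageSize), 3 * kPageSize);
// Trimming to zero is a full free: FreeRegion() is now just TrimRegion(addr, 0).
CHECK_EQ(ra.TrimRegion(kBegin, 0), kPageSize);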
@@ -37,6 +37,8 @@ TEST(AddressRegionTest, Contains) {
   CHECK(region.contains(end - 1));

   // Test two-arguments contains().
   CHECK(!region.contains(start - 1, size));
+  CHECK(!region.contains(start, size + 1));
+  CHECK(!region.contains(start - 17, 17));
   CHECK(!region.contains(start - 17, size * 2));
   CHECK(!region.contains(end, 1));
@@ -322,5 +322,35 @@ TEST(RegionAllocatorTest, FindRegion) {
   }
 }

+TEST(RegionAllocatorTest, TrimRegion) {
+  const size_t kPageSize = 4 * KB;
+  const size_t kPageCount = 64;
+  const size_t kSize = kPageSize * kPageCount;
+  const Address kBegin = static_cast<Address>(kPageSize * 153);
+  RegionAllocator ra(kBegin, kSize, kPageSize);
+
+  Address address = kBegin + 13 * kPageSize;
+  size_t size = 37 * kPageSize;
+  size_t free_size = kSize - size;
+  CHECK(ra.AllocateRegionAt(address, size));
+
+  size_t trim_size = kPageSize;
+  do {
+    CHECK_EQ(ra.CheckRegion(address), size);
+    CHECK_EQ(ra.free_size(), free_size);
+
+    trim_size = std::min(size, trim_size);
+    size -= trim_size;
+    free_size += trim_size;
+    CHECK_EQ(ra.TrimRegion(address, size), trim_size);
+
+    trim_size *= 2;
+  } while (size != 0);
+
+  // Check that the whole region is free and can be fully allocated.
+  CHECK_EQ(ra.free_size(), kSize);
+  CHECK_EQ(ra.AllocateRegion(kSize), kBegin);
+}
+
 }  // namespace base
 }  // namespace v8