Commit ec379ea8 authored by Clemens Hammacher, committed by Commit Bot

[wasm][gc] Discard pages of freed wasm code

To reduce physical memory consumption, discard code pages that are
fully freed.
To determine pages which only become fully free after several freed
wasm code objects, this CL adds a {DisjointAllocationPool} to track all
freed code ({freed_code_space_} in {NativeModule}).

R=mstarzinger@chromium.org

Bug: v8:8217
Change-Id: I22ad92d2c0bd4469e92f0dfd5aec05c03b5a47d6
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1594728
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#61244}
parent 2f4c928c
......@@ -43,7 +43,7 @@ namespace wasm {
using trap_handler::ProtectedInstructionData;
void DisjointAllocationPool::Merge(base::AddressRegion region) {
base::AddressRegion DisjointAllocationPool::Merge(base::AddressRegion region) {
auto dest_it = regions_.begin();
auto dest_end = regions_.end();
......@@ -53,7 +53,7 @@ void DisjointAllocationPool::Merge(base::AddressRegion region) {
// After last dest region: insert and done.
if (dest_it == dest_end) {
regions_.push_back(region);
return;
return region;
}
// Adjacent (from below) to dest: merge and done.
......@@ -62,13 +62,13 @@ void DisjointAllocationPool::Merge(base::AddressRegion region) {
region.size() + dest_it->size()};
DCHECK_EQ(merged_region.end(), dest_it->end());
*dest_it = merged_region;
return;
return merged_region;
}
// Before dest: insert and done.
if (dest_it->begin() > region.end()) {
regions_.insert(dest_it, region);
return;
return region;
}
// Src is adjacent from above. Merge and check whether the merged region is
......@@ -83,6 +83,7 @@ void DisjointAllocationPool::Merge(base::AddressRegion region) {
DCHECK_EQ(dest_it->end(), next_dest->end());
regions_.erase(next_dest);
}
return *dest_it;
}
base::AddressRegion DisjointAllocationPool::Allocate(size_t size) {
......@@ -1348,17 +1349,16 @@ bool NativeModule::IsRedirectedToInterpreter(uint32_t func_index) {
}
void NativeModule::FreeCode(Vector<WasmCode* const> codes) {
// For now, we only free the {WasmCode} objects and zap the code they referred
// to. We do not actually free the code pages yet.
// TODO(clemensh): Actually free the underlying code pages.
// Zap code area.
// Zap code area and collect freed code regions.
DisjointAllocationPool freed_regions;
size_t code_size = 0;
for (WasmCode* code : codes) {
ZapCode(code->instruction_start(), code->instructions().size());
FlushInstructionCache(code->instruction_start(),
code->instructions().size());
code_size += code->instructions().size();
freed_regions.Merge(base::AddressRegion{code->instruction_start(),
code->instructions().size()});
}
freed_code_size_.fetch_add(code_size);
......@@ -1368,6 +1368,20 @@ void NativeModule::FreeCode(Vector<WasmCode* const> codes) {
DCHECK_EQ(1, owned_code_.count(code->instruction_start()));
owned_code_.erase(code->instruction_start());
}
// Merge {freed_regions} into {freed_code_space_} and discard full pages.
PageAllocator* allocator = GetPlatformPageAllocator();
size_t page_size = allocator->AllocatePageSize();
for (auto region : freed_regions.regions()) {
auto merged_region = freed_code_space_.Merge(region);
Address discard_start = std::max(RoundUp(merged_region.begin(), page_size),
RoundDown(region.begin(), page_size));
Address discard_end = std::min(RoundDown(merged_region.end(), page_size),
RoundUp(region.end(), page_size));
if (discard_start >= discard_end) continue;
allocator->DiscardSystemPages(reinterpret_cast<void*>(discard_start),
discard_end - discard_start);
}
}
void WasmCodeManager::FreeNativeModule(NativeModule* native_module) {
......
......@@ -58,8 +58,8 @@ class V8_EXPORT_PRIVATE DisjointAllocationPool final {
// Merge the parameter region into this object while preserving ordering of
// the regions. The assumption is that the passed parameter is not
// intersecting this object - for example, it was obtained from a previous
// Allocate.
void Merge(base::AddressRegion);
// Allocate. Returns the merged region.
base::AddressRegion Merge(base::AddressRegion);
// Allocate a contiguous region of size {size}. Return an empty pool on
// failure.
......@@ -523,8 +523,14 @@ class V8_EXPORT_PRIVATE NativeModule final {
// this module marking those functions that have been redirected.
std::unique_ptr<uint8_t[]> interpreter_redirections_;
// Code space that was reserved and is available for allocations (subset of
// {owned_code_space_}).
DisjointAllocationPool free_code_space_;
// Code space that was allocated for code (subset of {owned_code_space_}).
DisjointAllocationPool allocated_code_space_;
// Code space that was allocated before but is dead now. Full pages within
// this region are discarded. It's still a subset of {owned_code_space_}.
DisjointAllocationPool freed_code_space_;
std::vector<VirtualMemory> owned_code_space_;
// End of fields protected by {allocation_mutex_}.
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment