Commit 1403fd7d authored by Clemens Backes, committed by Commit Bot

[wasm] Avoid unnecessary jump tables

If multiple code spaces are created, each of them currently gets its own
jump table (on 64 bit platforms). Since we try to allocate new code
spaces right after existing ones, this is often not necessary. We could
instead reuse the existing jump table(s).
This saves code space for the unneeded jump tables and avoids the cost of
patching the redundant jump tables when we replace code objects.

This CL implements this by checking whether an existing jump table (or
pair of far jump table and (near) jump table) fully covers a new code
space, and reuses the existing jump table in that case.

R=ahaas@chromium.org

Change-Id: Id8751b9c4036cf8f85f9baa2b0be8b2cfb5716ff
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2043846
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Commit-Queue: Clemens Backes <clemensb@chromium.org>
Cr-Commit-Position: refs/heads/master@{#66364}
parent 01fbaed0
...@@ -66,6 +66,16 @@ class AddressRegion { ...@@ -66,6 +66,16 @@ class AddressRegion {
}; };
ASSERT_TRIVIALLY_COPYABLE(AddressRegion); ASSERT_TRIVIALLY_COPYABLE(AddressRegion);
// Construct an AddressRegion from anything providing a {data()} and {size()}
// accessor.
template <typename Container,
typename = decltype(std::declval<Container>().data()),
typename = decltype(std::declval<Container>().size())>
inline constexpr AddressRegion AddressRegionOf(Container&& c) {
return AddressRegion{reinterpret_cast<AddressRegion::Address>(c.data()),
sizeof(*c.data()) * c.size()};
}
inline std::ostream& operator<<(std::ostream& out, AddressRegion region) { inline std::ostream& operator<<(std::ostream& out, AddressRegion region) {
return out << "[" << reinterpret_cast<void*>(region.begin()) << "+" return out << "[" << reinterpret_cast<void*>(region.begin()) << "+"
<< region.size() << "]"; << region.size() << "]";
......
...@@ -71,6 +71,9 @@ class Vector { ...@@ -71,6 +71,9 @@ class Vector {
// Returns a pointer to the start of the data in the vector. // Returns a pointer to the start of the data in the vector.
constexpr T* begin() const { return start_; } constexpr T* begin() const { return start_; }
// For consistency with other containers, do also provide a {data} accessor.
constexpr T* data() const { return start_; }
// Returns a pointer past the end of the data in the vector. // Returns a pointer past the end of the data in the vector.
constexpr T* end() const { return start_ + length_; } constexpr T* end() const { return start_ + length_; }
......
...@@ -835,7 +835,7 @@ WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) { ...@@ -835,7 +835,7 @@ WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) {
int mode_mask = int mode_mask =
RelocInfo::kApplyMask | RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL); RelocInfo::kApplyMask | RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
auto jump_tables_ref = auto jump_tables_ref =
FindJumpTablesForCode(reinterpret_cast<Address>(dst_code_bytes.begin())); FindJumpTablesForRegion(base::AddressRegionOf(dst_code_bytes));
Address dst_code_addr = reinterpret_cast<Address>(dst_code_bytes.begin()); Address dst_code_addr = reinterpret_cast<Address>(dst_code_bytes.begin());
Address constant_pool_start = dst_code_addr + constant_pool_offset; Address constant_pool_start = dst_code_addr + constant_pool_offset;
RelocIterator orig_it(*code, mode_mask); RelocIterator orig_it(*code, mode_mask);
...@@ -899,9 +899,9 @@ void NativeModule::UseLazyStub(uint32_t func_index) { ...@@ -899,9 +899,9 @@ void NativeModule::UseLazyStub(uint32_t func_index) {
JumpTableAssembler::GenerateLazyCompileTable( JumpTableAssembler::GenerateLazyCompileTable(
lazy_compile_table_->instruction_start(), num_slots, lazy_compile_table_->instruction_start(), num_slots,
module_->num_imported_functions, module_->num_imported_functions,
GetNearRuntimeStubEntry( GetNearRuntimeStubEntry(WasmCode::kWasmCompileLazy,
WasmCode::kWasmCompileLazy, FindJumpTablesForRegion(base::AddressRegionOf(
FindJumpTablesForCode(lazy_compile_table_->instruction_start()))); lazy_compile_table_->instructions()))));
} }
// Add jump table entry for jump to the lazy compile stub. // Add jump table entry for jump to the lazy compile stub.
...@@ -923,7 +923,7 @@ std::unique_ptr<WasmCode> NativeModule::AddCode( ...@@ -923,7 +923,7 @@ std::unique_ptr<WasmCode> NativeModule::AddCode(
Vector<byte> code_space = Vector<byte> code_space =
code_allocator_.AllocateForCode(this, desc.instr_size); code_allocator_.AllocateForCode(this, desc.instr_size);
auto jump_table_ref = auto jump_table_ref =
FindJumpTablesForCode(reinterpret_cast<Address>(code_space.begin())); FindJumpTablesForRegion(base::AddressRegionOf(code_space));
return AddCodeWithCodeSpace(index, desc, stack_slots, tagged_parameter_slots, return AddCodeWithCodeSpace(index, desc, stack_slots, tagged_parameter_slots,
std::move(protected_instructions), std::move(protected_instructions),
std::move(source_position_table), kind, tier, std::move(source_position_table), kind, tier,
...@@ -936,7 +936,7 @@ std::unique_ptr<WasmCode> NativeModule::AddCodeWithCodeSpace( ...@@ -936,7 +936,7 @@ std::unique_ptr<WasmCode> NativeModule::AddCodeWithCodeSpace(
OwnedVector<ProtectedInstructionData> protected_instructions, OwnedVector<ProtectedInstructionData> protected_instructions,
OwnedVector<const byte> source_position_table, WasmCode::Kind kind, OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
ExecutionTier tier, Vector<uint8_t> dst_code_bytes, ExecutionTier tier, Vector<uint8_t> dst_code_bytes,
const JumpTablesRef& jump_tables_ref) { const JumpTablesRef& jump_tables) {
OwnedVector<byte> reloc_info; OwnedVector<byte> reloc_info;
if (desc.reloc_size > 0) { if (desc.reloc_size > 0) {
reloc_info = OwnedVector<byte>::New(desc.reloc_size); reloc_info = OwnedVector<byte>::New(desc.reloc_size);
...@@ -973,13 +973,13 @@ std::unique_ptr<WasmCode> NativeModule::AddCodeWithCodeSpace( ...@@ -973,13 +973,13 @@ std::unique_ptr<WasmCode> NativeModule::AddCodeWithCodeSpace(
RelocInfo::Mode mode = it.rinfo()->rmode(); RelocInfo::Mode mode = it.rinfo()->rmode();
if (RelocInfo::IsWasmCall(mode)) { if (RelocInfo::IsWasmCall(mode)) {
uint32_t call_tag = it.rinfo()->wasm_call_tag(); uint32_t call_tag = it.rinfo()->wasm_call_tag();
Address target = GetNearCallTargetForFunction(call_tag, jump_tables_ref); Address target = GetNearCallTargetForFunction(call_tag, jump_tables);
it.rinfo()->set_wasm_call_address(target, SKIP_ICACHE_FLUSH); it.rinfo()->set_wasm_call_address(target, SKIP_ICACHE_FLUSH);
} else if (RelocInfo::IsWasmStubCall(mode)) { } else if (RelocInfo::IsWasmStubCall(mode)) {
uint32_t stub_call_tag = it.rinfo()->wasm_call_tag(); uint32_t stub_call_tag = it.rinfo()->wasm_call_tag();
DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount); DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
Address entry = GetNearRuntimeStubEntry( Address entry = GetNearRuntimeStubEntry(
static_cast<WasmCode::RuntimeStubId>(stub_call_tag), jump_tables_ref); static_cast<WasmCode::RuntimeStubId>(stub_call_tag), jump_tables);
it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH); it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
} else { } else {
it.rinfo()->apply(delta); it.rinfo()->apply(delta);
...@@ -1234,12 +1234,10 @@ void NativeModule::AddCodeSpace( ...@@ -1234,12 +1234,10 @@ void NativeModule::AddCodeSpace(
WasmCode* jump_table = nullptr; WasmCode* jump_table = nullptr;
WasmCode* far_jump_table = nullptr; WasmCode* far_jump_table = nullptr;
const uint32_t num_wasm_functions = module_->num_declared_functions; const uint32_t num_wasm_functions = module_->num_declared_functions;
const bool has_functions = num_wasm_functions > 0;
const bool is_first_code_space = code_space_data_.empty(); const bool is_first_code_space = code_space_data_.empty();
// TODO(clemensb): Avoid additional jump table if the code space is close // We always need a far jump table, because it contains the runtime stubs.
// enough to another existing code space. const bool needs_far_jump_table = !FindJumpTablesForRegion(region).is_valid();
const bool needs_jump_table = const bool needs_jump_table = num_wasm_functions > 0 && needs_far_jump_table;
has_functions && (kNeedsFarJumpsBetweenCodeSpaces || is_first_code_space);
if (needs_jump_table) { if (needs_jump_table) {
jump_table = CreateEmptyJumpTableInRegion( jump_table = CreateEmptyJumpTableInRegion(
...@@ -1248,7 +1246,7 @@ void NativeModule::AddCodeSpace( ...@@ -1248,7 +1246,7 @@ void NativeModule::AddCodeSpace(
CHECK(region.contains(jump_table->instruction_start())); CHECK(region.contains(jump_table->instruction_start()));
} }
// Always allocate a far jump table, because it contains the runtime stubs. if (needs_far_jump_table) {
int num_function_slots = NumWasmFunctionsInFarJumpTable(num_wasm_functions); int num_function_slots = NumWasmFunctionsInFarJumpTable(num_wasm_functions);
far_jump_table = CreateEmptyJumpTableInRegion( far_jump_table = CreateEmptyJumpTableInRegion(
JumpTableAssembler::SizeForNumberOfFarJumpSlots( JumpTableAssembler::SizeForNumberOfFarJumpSlots(
...@@ -1272,6 +1270,7 @@ void NativeModule::AddCodeSpace( ...@@ -1272,6 +1270,7 @@ void NativeModule::AddCodeSpace(
JumpTableAssembler::GenerateFarJumpTable( JumpTableAssembler::GenerateFarJumpTable(
far_jump_table->instruction_start(), builtin_addresses, far_jump_table->instruction_start(), builtin_addresses,
WasmCode::kRuntimeStubCount, num_function_slots); WasmCode::kRuntimeStubCount, num_function_slots);
}
if (is_first_code_space) main_jump_table_ = jump_table; if (is_first_code_space) main_jump_table_ = jump_table;
...@@ -1350,32 +1349,48 @@ Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const { ...@@ -1350,32 +1349,48 @@ Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const {
return main_jump_table_->instruction_start() + slot_offset; return main_jump_table_->instruction_start() + slot_offset;
} }
NativeModule::JumpTablesRef NativeModule::FindJumpTablesForCode( NativeModule::JumpTablesRef NativeModule::FindJumpTablesForRegion(
Address code_addr) const { base::AddressRegion code_region) const {
auto jump_table_usable = [code_region](const WasmCode* jump_table) {
Address table_start = jump_table->instruction_start();
Address table_end = table_start + jump_table->instructions().size();
// Compute the maximum distance from anywhere in the code region to anywhere
// in the jump table, avoiding any underflow.
size_t max_distance = std::max(
code_region.end() > table_start ? code_region.end() - table_start : 0,
table_end > code_region.begin() ? table_end - code_region.begin() : 0);
return max_distance < kMaxWasmCodeSpaceSize;
};
base::MutexGuard guard(&allocation_mutex_); base::MutexGuard guard(&allocation_mutex_);
for (auto& code_space_data : code_space_data_) { for (auto& code_space_data : code_space_data_) {
const bool jump_table_reachable = DCHECK_IMPLIES(code_space_data.jump_table, code_space_data.far_jump_table);
!kNeedsFarJumpsBetweenCodeSpaces || if (!code_space_data.far_jump_table) continue;
code_space_data.region.contains(code_addr); // Only return these jump tables if they are reachable from the whole
if (jump_table_reachable && code_space_data.far_jump_table) { // {code_region}.
// We might not have a jump table if we have no functions. if (kNeedsFarJumpsBetweenCodeSpaces &&
(!jump_table_usable(code_space_data.far_jump_table) ||
(code_space_data.jump_table &&
!jump_table_usable(code_space_data.jump_table)))) {
continue;
}
return {code_space_data.jump_table return {code_space_data.jump_table
? code_space_data.jump_table->instruction_start() ? code_space_data.jump_table->instruction_start()
: kNullAddress, : kNullAddress,
code_space_data.far_jump_table->instruction_start()}; code_space_data.far_jump_table->instruction_start()};
} }
} return {};
FATAL("code_addr is not part of a code space");
} }
Address NativeModule::GetNearCallTargetForFunction( Address NativeModule::GetNearCallTargetForFunction(
uint32_t func_index, const JumpTablesRef& jump_tables) const { uint32_t func_index, const JumpTablesRef& jump_tables) const {
DCHECK(jump_tables.is_valid());
uint32_t slot_offset = GetJumpTableOffset(func_index); uint32_t slot_offset = GetJumpTableOffset(func_index);
return jump_tables.jump_table_start + slot_offset; return jump_tables.jump_table_start + slot_offset;
} }
Address NativeModule::GetNearRuntimeStubEntry( Address NativeModule::GetNearRuntimeStubEntry(
WasmCode::RuntimeStubId index, const JumpTablesRef& jump_tables) const { WasmCode::RuntimeStubId index, const JumpTablesRef& jump_tables) const {
DCHECK(jump_tables.is_valid());
auto offset = JumpTableAssembler::FarJumpSlotIndexToOffset(index); auto offset = JumpTableAssembler::FarJumpSlotIndexToOffset(index);
return jump_tables.far_jump_table_start + offset; return jump_tables.far_jump_table_start + offset;
} }
...@@ -1761,8 +1776,7 @@ std::vector<WasmCode*> NativeModule::AddCompiledCode( ...@@ -1761,8 +1776,7 @@ std::vector<WasmCode*> NativeModule::AddCompiledCode(
Vector<byte> code_space = Vector<byte> code_space =
code_allocator_.AllocateForCode(this, total_code_space); code_allocator_.AllocateForCode(this, total_code_space);
// Lookup the jump tables to use once, then use for all code objects. // Lookup the jump tables to use once, then use for all code objects.
auto jump_tables_ref = auto jump_tables = FindJumpTablesForRegion(base::AddressRegionOf(code_space));
FindJumpTablesForCode(reinterpret_cast<Address>(code_space.begin()));
std::vector<std::unique_ptr<WasmCode>> generated_code; std::vector<std::unique_ptr<WasmCode>> generated_code;
generated_code.reserve(results.size()); generated_code.reserve(results.size());
...@@ -1777,7 +1791,7 @@ std::vector<WasmCode*> NativeModule::AddCompiledCode( ...@@ -1777,7 +1791,7 @@ std::vector<WasmCode*> NativeModule::AddCompiledCode(
result.func_index, result.code_desc, result.frame_slot_count, result.func_index, result.code_desc, result.frame_slot_count,
result.tagged_parameter_slots, std::move(result.protected_instructions), result.tagged_parameter_slots, std::move(result.protected_instructions),
std::move(result.source_positions), GetCodeKind(result), std::move(result.source_positions), GetCodeKind(result),
result.result_tier, this_code_space, jump_tables_ref)); result.result_tier, this_code_space, jump_tables));
} }
DCHECK_EQ(0, code_space.size()); DCHECK_EQ(0, code_space.size());
......
...@@ -462,23 +462,25 @@ class V8_EXPORT_PRIVATE NativeModule final { ...@@ -462,23 +462,25 @@ class V8_EXPORT_PRIVATE NativeModule final {
Address GetCallTargetForFunction(uint32_t func_index) const; Address GetCallTargetForFunction(uint32_t func_index) const;
struct JumpTablesRef { struct JumpTablesRef {
const Address jump_table_start; const Address jump_table_start = kNullAddress;
const Address far_jump_table_start; const Address far_jump_table_start = kNullAddress;
bool is_valid() const { return far_jump_table_start != kNullAddress; }
}; };
// Finds the jump tables that should be used for the code at {code_addr}. This // Finds the jump tables that should be used for given code region. This
// information is then passed to {GetNearCallTargetForFunction} and // information is then passed to {GetNearCallTargetForFunction} and
// {GetNearRuntimeStubEntry} to avoid the overhead of looking this information // {GetNearRuntimeStubEntry} to avoid the overhead of looking this information
// up there. // up there. Return an empty struct if no suitable jump tables exist.
JumpTablesRef FindJumpTablesForCode(Address code_addr) const; JumpTablesRef FindJumpTablesForRegion(base::AddressRegion) const;
// Similarly to {GetCallTargetForFunction}, but uses the jump table previously // Similarly to {GetCallTargetForFunction}, but uses the jump table previously
// looked up via {FindJumpTablesForCode}. // looked up via {FindJumpTablesForRegion}.
Address GetNearCallTargetForFunction(uint32_t func_index, Address GetNearCallTargetForFunction(uint32_t func_index,
const JumpTablesRef&) const; const JumpTablesRef&) const;
// Get a runtime stub entry (which is a far jump table slot) in the jump table // Get a runtime stub entry (which is a far jump table slot) in the jump table
// previously looked up via {FindJumpTablesForCode}. // previously looked up via {FindJumpTablesForRegion}.
Address GetNearRuntimeStubEntry(WasmCode::RuntimeStubId index, Address GetNearRuntimeStubEntry(WasmCode::RuntimeStubId index,
const JumpTablesRef&) const; const JumpTablesRef&) const;
......
...@@ -540,8 +540,8 @@ bool NativeModuleDeserializer::ReadCode(uint32_t fn_index, Reader* reader) { ...@@ -540,8 +540,8 @@ bool NativeModuleDeserializer::ReadCode(uint32_t fn_index, Reader* reader) {
RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) | RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED); RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED);
auto jump_tables_ref = auto jump_tables_ref = native_module_->FindJumpTablesForRegion(
native_module_->FindJumpTablesForCode(code->instruction_start()); base::AddressRegionOf(code->instructions()));
for (RelocIterator iter(code->instructions(), code->reloc_info(), for (RelocIterator iter(code->instructions(), code->reloc_info(),
code->constant_pool(), mask); code->constant_pool(), mask);
!iter.done(); iter.next()) { !iter.done(); iter.next()) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment