Commit 16191e9a authored by Clemens Hammacher, committed by Commit Bot

[wasm] Add function slots to the far jump table

This adds the --wasm-far-jump-table flag, which enables the extension
of the far jump table with a slot for each wasm function.
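
Background (not part of this CL's diff): on 64-bit targets a near jump encodes
only a 32-bit displacement, so two code spaces more than ±2GB apart cannot
reach each other directly; a far slot materializes the full target address.
A minimal sketch of that range check, with names and constants that are
illustrative rather than V8's actual implementation:

    #include <cstdint>

    // rel32 near jumps reach +/-2GB from the end of the jump instruction.
    constexpr int64_t kMaxNearJumpDistance = int64_t{1} << 31;

    bool NeedsFarJump(uint64_t from, uint64_t to) {
      // Signed distance between jump site and target.
      int64_t distance = static_cast<int64_t>(to) - static_cast<int64_t>(from);
      return distance >= kMaxNearJumpDistance || distance < -kMaxNearJumpDistance;
    }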

R=mstarzinger@chromium.org

Bug: v8:9477
Change-Id: I61cb4592aa8ed75e2772371e9b7dbfdfe9bb0046
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1792907
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#63680}
parent 8734a486
@@ -769,6 +769,9 @@ DEFINE_BOOL(wasm_code_gc, true, "enable garbage collection of wasm code")
 DEFINE_BOOL(trace_wasm_code_gc, false, "trace garbage collection of wasm code")
 DEFINE_BOOL(stress_wasm_code_gc, false,
             "stress test garbage collection of wasm code")
+DEFINE_BOOL(wasm_far_jump_table, false,
+            "use multiple separate code spaces that might require far jumps "
+            "between them")
 
 // Profiler flags.
 DEFINE_INT(frame_count, 1, "number of stack frames inspected by the profiler")
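
The flag is off by default in this CL. Like any DEFINE_BOOL flag it maps to a
command-line switch, so it could be enabled when running d8 (the test file
name is made up):

    d8 --wasm-far-jump-table test.js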
@@ -21,7 +21,9 @@ namespace wasm {
 // other purposes:
 // - the far stub table contains one entry per wasm runtime stub (see
 //   {WasmCode::RuntimeStubId}), which jumps to the corresponding embedded
-//   builtin.
+//   builtin, plus (if {FLAG_wasm_far_jump_table} is enabled and the full
+//   address space cannot be reached via the jump table) one entry per wasm
+//   function.
 // - the lazy compile table contains one entry per wasm function which jumps to
 //   the common {WasmCompileLazy} builtin and passes the function index that was
 //   invoked.
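
Spelled out, the new far-table layout is: slots [0, kRuntimeStubCount) hold
the runtime stubs, and the slots after them hold the per-function entries.
A sketch of the resulting index math (the helper below is illustrative and
not part of this CL; the real code goes through {FarJumpSlotIndexToOffset}):

    // Illustrative: far-table slot index of a declared function.
    int FarJumpSlotIndexForFunction(int declared_function_index) {
      return WasmCode::kRuntimeStubCount + declared_function_index;
    }
    // The byte offset then follows the existing slot scheme:
    //   offset = index * kFarJumpTableSlotSize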
@@ -85,8 +87,10 @@ class V8_EXPORT_PRIVATE JumpTableAssembler : public MacroAssembler {
 
   // Determine the size of a far jump table containing the given number of
   // slots.
-  static constexpr uint32_t SizeForNumberOfFarJumpSlots(int num_stubs) {
-    return num_stubs * kFarJumpTableSlotSize;
+  static constexpr uint32_t SizeForNumberOfFarJumpSlots(
+      int num_stubs, int num_function_slots) {
+    int num_entries = num_stubs + num_function_slots;
+    return num_entries * kFarJumpTableSlotSize;
   }
 
   // Translate a slot index to an offset into the lazy compile table.
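
The computation stays constexpr, so callers can size tables at compile time
where the slot count is known. An illustrative use (the function count is
made up):

    // Far table sized for all runtime stubs plus 1000 function slots.
    constexpr uint32_t kFarTableSize =
        JumpTableAssembler::SizeForNumberOfFarJumpSlots(
            WasmCode::kRuntimeStubCount, 1000);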
@@ -115,14 +119,18 @@ class V8_EXPORT_PRIVATE JumpTableAssembler : public MacroAssembler {
   }
 
   static void GenerateFarJumpTable(Address base, Address* stub_targets,
-                                   int num_stubs) {
-    uint32_t table_size = num_stubs * kFarJumpTableSlotSize;
+                                   int num_stubs, int num_function_slots) {
+    uint32_t table_size =
+        SizeForNumberOfFarJumpSlots(num_stubs, num_function_slots);
     // Assume enough space, so the Assembler does not try to grow the buffer.
     JumpTableAssembler jtasm(base, table_size + 256);
     int offset = 0;
-    for (int index = 0; index < num_stubs; ++index) {
+    for (int index = 0; index < num_stubs + num_function_slots; ++index) {
       DCHECK_EQ(offset, FarJumpSlotIndexToOffset(index));
-      jtasm.EmitFarJumpSlot(stub_targets[index]);
+      // Function slots initially jump to themselves. They are patched before
+      // being used.
+      Address target = index < num_stubs ? stub_targets[index] : base + offset;
+      jtasm.EmitFarJumpSlot(target);
       offset += kFarJumpTableSlotSize;
       DCHECK_EQ(offset, jtasm.pc_offset());
     }
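
The self-jump placeholder is deliberate: an unpatched function slot that is
somehow reached spins in place rather than jumping to stale or unmapped
memory. Patching a slot to its real target later would look roughly like the
sketch below; {PatchFarJumpSlot} is hypothetical, and real patching has to go
through the architecture-specific assembler and respect the target's rules
for concurrently executed code:

    // Hypothetical: redirect the far slot of function `func_index` to its
    // freshly compiled code.
    void PatchFunctionFarJumpSlot(Address table_base, int num_stubs,
                                  int func_index, Address new_target) {
      Address slot = table_base + JumpTableAssembler::FarJumpSlotIndexToOffset(
                                      num_stubs + func_index);
      PatchFarJumpSlot(slot, new_target);  // hypothetical helper
    }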
@@ -860,9 +860,13 @@ void NativeModule::SetRuntimeStubs(Isolate* isolate) {
   DCHECK_EQ(kNullAddress, runtime_stub_entries_[0]);  // Only called once.
   WasmCodeRefScope code_ref_scope;
   DCHECK_EQ(1, code_space_data_.size());
+  int num_function_slots =
+      kNeedsFarJumpsBetweenCodeSpaces && FLAG_wasm_far_jump_table
+          ? static_cast<int>(module_->num_declared_functions)
+          : 0;
   WasmCode* jump_table = CreateEmptyJumpTableInRegion(
       JumpTableAssembler::SizeForNumberOfFarJumpSlots(
-          WasmCode::kRuntimeStubCount),
+          WasmCode::kRuntimeStubCount, num_function_slots),
       code_space_data_[0].region);
   Address base = jump_table->instruction_start();
   EmbeddedData embedded_data = EmbeddedData::FromBlob();
@@ -880,9 +884,11 @@ void NativeModule::SetRuntimeStubs(Isolate* isolate) {
     runtime_stub_entries_[i] =
         base + JumpTableAssembler::FarJumpSlotIndexToOffset(i);
   }
-  JumpTableAssembler::GenerateFarJumpTable(base, builtin_address,
-                                           WasmCode::kRuntimeStubCount);
+  JumpTableAssembler::GenerateFarJumpTable(
+      base, builtin_address, WasmCode::kRuntimeStubCount, num_function_slots);
   DCHECK_NULL(runtime_stub_table_);
+  // TODO(clemensh): Store this as "far jump table" (instead of "runtime stub
+  // table") per code space.
   runtime_stub_table_ = jump_table;
   DCHECK_NE(kNullAddress, runtime_stub_entries_[0]);
 }
@@ -350,9 +350,9 @@ class WasmCodeAllocator {
 class V8_EXPORT_PRIVATE NativeModule final {
  public:
 #if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390X || V8_TARGET_ARCH_ARM64
-  static constexpr bool kCanAllocateMoreMemory = false;
+  static constexpr bool kNeedsFarJumpsBetweenCodeSpaces = true;
 #else
-  static constexpr bool kCanAllocateMoreMemory = true;
+  static constexpr bool kNeedsFarJumpsBetweenCodeSpaces = false;
 #endif
 
   // {AddCode} is thread safe w.r.t. other calls to {AddCode} or methods adding
@@ -675,7 +675,8 @@ std::shared_ptr<NativeModule> WasmEngine::NewNativeModule(
   size_t code_size_estimate =
       wasm::WasmCodeManager::EstimateNativeModuleCodeSize(module.get());
   return NewNativeModule(isolate, enabled, code_size_estimate,
-                         wasm::NativeModule::kCanAllocateMoreMemory,
+                         !wasm::NativeModule::kNeedsFarJumpsBetweenCodeSpaces ||
+                             FLAG_wasm_far_jump_table,
                          std::move(module));
 }
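
Reading the rewritten argument: additional code spaces are allowed exactly
when near jumps always suffice, or when the far jump table makes cross-space
jumps possible. With the flag off this reproduces the old
kCanAllocateMoreMemory values:

    kNeedsFarJumpsBetweenCodeSpaces | FLAG_wasm_far_jump_table | argument passed
    --------------------------------+--------------------------+----------------
    false (32-bit targets)          | (any)                    | true
    true (x64, arm64, s390x)        | false                    | false
    true (x64, arm64, s390x)        | true                     | true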