Commit f08cc274 authored by Clemens Hammacher, committed by Commit Bot

Reland "[wasm] Patch jump tables in all code spaces"

This is a reland of d7d25d2a

Original change's description:
> [wasm] Patch jump tables in all code spaces
> 
> If there are multiple code spaces, make sure to patch the jump tables
> in all of them.
> 
> R=mstarzinger@chromium.org
> 
> Bug: v8:9477
> Change-Id: I2ec3d3de913b99623fd310004555337329588da0
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1789289
> Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
> Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#63651}

Bug: v8:9477
Change-Id: I89c3d59d8366ac9479e58feea91dd40ee4e01f66
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1796065
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#63683}
parent aeccaceb
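
The core of the change is the PatchJumpTablesLocked helper added in the diff below: publishing code for a function now walks every registered code space and patches that space's copy of the jump table, instead of patching only main_jump_table_. The following is a minimal standalone sketch of that pattern, using standard-library types in place of V8's base::Mutex, WasmCode, and JumpTableAssembler; the class and member names are illustrative stand-ins, not the real V8 API.

// Standalone sketch of the pattern this CL introduces (simplified types, not
// the real V8 classes): a registry of code spaces, each optionally carrying a
// jump table, and a patch routine that updates the slot in every table while
// the registry mutex is held by the caller.
#include <cstdint>
#include <mutex>
#include <vector>

struct JumpTable {
  std::vector<uintptr_t> slots;  // one branch target per declared function
};

struct CodeSpaceData {
  uintptr_t region_start;
  JumpTable* jump_table;  // may be null: secondary code spaces have no table
};

class NativeModuleSketch {
 public:
  // Precondition: the caller holds {mutex_}. V8 asserts this with a
  // TryLock-based DCHECK; that is omitted here because try_lock on an owned
  // std::mutex is not portable.
  void PatchJumpTablesLocked(uint32_t func_index, uintptr_t target) {
    for (CodeSpaceData& code_space : code_spaces_) {
      if (!code_space.jump_table) continue;  // nothing to patch in this space
      code_space.jump_table->slots.at(func_index) = target;
    }
  }

  // Publishing new code for a function patches the slot in *all* code spaces,
  // not just a single main jump table.
  void PublishCode(uint32_t func_index, uintptr_t target) {
    std::lock_guard<std::mutex> guard(mutex_);
    PatchJumpTablesLocked(func_index, target);
  }

  void AddCodeSpace(uintptr_t region_start, JumpTable* jump_table) {
    std::lock_guard<std::mutex> guard(mutex_);
    code_spaces_.push_back(CodeSpaceData{region_start, jump_table});
  }

 private:
  std::mutex mutex_;
  std::vector<CodeSpaceData> code_spaces_;
};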
@@ -702,12 +702,18 @@ void NativeModule::ReserveCodeTableForTesting(uint32_t max_functions) {
   }
   code_table_.reset(new_table);

-  CHECK_EQ(1, code_space_data_.size());
+  base::AddressRegion single_code_space_region;
+  {
+    base::MutexGuard guard(&allocation_mutex_);
+    CHECK_EQ(1, code_space_data_.size());
+    single_code_space_region = code_space_data_[0].region;
+  }
   // Re-allocate jump table.
-  code_space_data_[0].jump_table = CreateEmptyJumpTableInRegion(
+  main_jump_table_ = CreateEmptyJumpTableInRegion(
       JumpTableAssembler::SizeForNumberOfSlots(max_functions),
-      code_space_data_[0].region);
-  main_jump_table_ = code_space_data_[0].jump_table;
+      single_code_space_region);
+  base::MutexGuard guard(&allocation_mutex_);
+  code_space_data_[0].jump_table = main_jump_table_;
 }

 void NativeModule::LogWasmCodes(Isolate* isolate) {
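
Note the lock-scoping shape introduced above, which recurs in the UseLazyStub and SetRuntimeStubs hunks that follow: the single code-space region is copied out inside a short base::MutexGuard scope, the lock is released before CreateEmptyJumpTableInRegion runs (that helper ends in PublishCode, which takes allocation_mutex_ itself, as a later hunk shows), and the lock is re-taken only to store the result. A hedged sketch of that shape with standard-library stand-ins; all names here are illustrative.

// Read the shared region under the mutex, allocate without it (the allocator
// re-acquires the same mutex internally), then re-lock to publish the result.
#include <cstddef>
#include <cstdint>
#include <mutex>
#include <vector>

struct Region {
  uintptr_t base;
  size_t size;
};

std::mutex allocation_mutex_;
std::vector<Region> code_space_regions_ = {{0x10000, 0x8000}};
uintptr_t jump_table_ = 0;

// Stand-in for CreateEmptyJumpTableInRegion: must not be called with
// allocation_mutex_ held, because it takes the lock itself (as the real
// helper does when it publishes the new jump-table code object).
uintptr_t AllocateJumpTableIn(const Region& region) {
  std::lock_guard<std::mutex> guard(allocation_mutex_);
  return region.base;  // pretend the table is placed at the region start
}

void ReallocateJumpTable() {
  Region single_region{};
  {
    // Copy the shared data out under the lock ...
    std::lock_guard<std::mutex> guard(allocation_mutex_);
    single_region = code_space_regions_[0];
  }
  // ... allocate while the lock is released ...
  uintptr_t table = AllocateJumpTableIn(single_region);
  // ... and re-acquire it only to store the result.
  std::lock_guard<std::mutex> guard(allocation_mutex_);
  jump_table_ = table;
}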
@@ -825,10 +831,15 @@ void NativeModule::UseLazyStub(uint32_t func_index) {
   if (!lazy_compile_table_) {
     uint32_t num_slots = module_->num_declared_functions;
     WasmCodeRefScope code_ref_scope;
-    DCHECK_EQ(1, code_space_data_.size());
+    base::AddressRegion single_code_space_region;
+    {
+      base::MutexGuard guard(&allocation_mutex_);
+      DCHECK_EQ(1, code_space_data_.size());
+      single_code_space_region = code_space_data_[0].region;
+    }
     lazy_compile_table_ = CreateEmptyJumpTableInRegion(
         JumpTableAssembler::SizeForNumberOfLazyFunctions(num_slots),
-        code_space_data_[0].region);
+        single_code_space_region);
     JumpTableAssembler::GenerateLazyCompileTable(
         lazy_compile_table_->instruction_start(), num_slots,
         module_->num_imported_functions,
@@ -837,13 +848,13 @@ void NativeModule::UseLazyStub(uint32_t func_index) {
   // Add jump table entry for jump to the lazy compile stub.
   uint32_t slot_index = func_index - module_->num_imported_functions;
+  DCHECK_NULL(code_table_[slot_index]);
   DCHECK_NE(runtime_stub_entry(WasmCode::kWasmCompileLazy), kNullAddress);
   Address lazy_compile_target =
       lazy_compile_table_->instruction_start() +
       JumpTableAssembler::LazyCompileSlotIndexToOffset(slot_index);
-  JumpTableAssembler::PatchJumpTableSlot(main_jump_table_->instruction_start(),
-                                         slot_index, lazy_compile_target,
-                                         WasmCode::kFlushICache);
+  base::MutexGuard guard(&allocation_mutex_);
+  PatchJumpTablesLocked(func_index, lazy_compile_target);
 }

 // TODO(mstarzinger): Remove {Isolate} parameter once {V8_EMBEDDED_BUILTINS}
@@ -859,7 +870,12 @@ void NativeModule::SetRuntimeStubs(Isolate* isolate) {
 #endif  // V8_EMBEDDED_BUILTINS
   DCHECK_EQ(kNullAddress, runtime_stub_entries_[0]);  // Only called once.
   WasmCodeRefScope code_ref_scope;
-  DCHECK_EQ(1, code_space_data_.size());
+  base::AddressRegion single_code_space_region;
+  {
+    base::MutexGuard guard(&allocation_mutex_);
+    DCHECK_EQ(1, code_space_data_.size());
+    single_code_space_region = code_space_data_[0].region;
+  }
   int num_function_slots =
       kNeedsFarJumpsBetweenCodeSpaces && FLAG_wasm_far_jump_table
           ? static_cast<int>(module_->num_declared_functions)
@@ -867,7 +883,7 @@ void NativeModule::SetRuntimeStubs(Isolate* isolate) {
   WasmCode* jump_table = CreateEmptyJumpTableInRegion(
       JumpTableAssembler::SizeForNumberOfFarJumpSlots(
           WasmCode::kRuntimeStubCount, num_function_slots),
-      code_space_data_[0].region);
+      single_code_space_region);
   Address base = jump_table->instruction_start();
   EmbeddedData embedded_data = EmbeddedData::FromBlob();
 #define RUNTIME_STUB(Name) Builtins::k##Name,
@@ -1040,9 +1056,7 @@ WasmCode* NativeModule::PublishCodeLocked(std::unique_ptr<WasmCode> code) {
     }
     if (update_jump_table) {
-      JumpTableAssembler::PatchJumpTableSlot(
-          main_jump_table_->instruction_start(), slot_idx,
-          code->instruction_start(), WasmCode::kFlushICache);
+      PatchJumpTablesLocked(code->index(), code->instruction_start());
     }
   }
   WasmCodeRefScope::AddRef(code.get());
@@ -1136,10 +1150,22 @@ WasmCode* NativeModule::CreateEmptyJumpTableInRegion(
   return PublishCode(std::move(code));
 }

+void NativeModule::PatchJumpTablesLocked(uint32_t func_index, Address target) {
+  // The caller must hold the {allocation_mutex_}, thus we fail to lock it here.
+  DCHECK(!allocation_mutex_.TryLock());
+
+  uint32_t slot_index = func_index - module_->num_imported_functions;
+  for (auto& code_space_data : code_space_data_) {
+    if (!code_space_data.jump_table) continue;
+    Address jump_table_base = code_space_data.jump_table->instruction_start();
+    JumpTableAssembler::PatchJumpTableSlot(jump_table_base, slot_index, target,
+                                           WasmCode::kFlushICache);
+  }
+}
+
 void NativeModule::AddCodeSpace(base::AddressRegion region) {
   // Each code space must be at least twice as large as the overhead per code
   // space. Otherwise, we are wasting too much memory.
-  const bool is_first_code_space = code_space_data_.empty();
   const bool implicit_alloc_disabled =
       engine_->code_manager()->IsImplicitAllocationsDisabledForTesting();
@@ -1164,6 +1190,11 @@ void NativeModule::AddCodeSpace(base::AddressRegion region) {
   WasmCode* jump_table = nullptr;
   const uint32_t num_wasm_functions = module_->num_declared_functions;
   const bool has_functions = num_wasm_functions > 0;
+  bool is_first_code_space;
+  {
+    base::MutexGuard guard(&allocation_mutex_);
+    is_first_code_space = code_space_data_.empty();
+  }
   const bool needs_jump_table =
       has_functions && is_first_code_space && !implicit_alloc_disabled;
@@ -1175,6 +1206,7 @@ void NativeModule::AddCodeSpace(base::AddressRegion region) {
   if (is_first_code_space) main_jump_table_ = jump_table;

+  base::MutexGuard guard(&allocation_mutex_);
   code_space_data_.push_back(CodeSpaceData{region, jump_table});
 }
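As the AddCodeSpace hunks above show, only the first code space gets a jump table (needs_jump_table requires is_first_code_space), so any later code space is registered with a null jump_table; the null check in PatchJumpTablesLocked is what keeps the patch loop safe once such secondary spaces exist. Continuing the earlier standalone sketch (same illustrative types, assumed to be in scope), a secondary space without a table is simply skipped:

// Continuing the sketch above (illustrative, not the V8 API): a second code
// space registered without a jump table is skipped by the patch loop.
int main() {
  NativeModuleSketch native_module;
  JumpTable main_table{std::vector<uintptr_t>(16, 0)};
  native_module.AddCodeSpace(0x10000, &main_table);  // first space: owns the table
  native_module.AddCodeSpace(0x20000, nullptr);      // secondary space: no table yet
  native_module.PublishCode(/*func_index=*/3, /*target=*/0xdeadbeef);
  return main_table.slots[3] == 0xdeadbeef ? 0 : 1;
}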
@@ -528,6 +528,9 @@ class V8_EXPORT_PRIVATE NativeModule final {
   WasmCode* CreateEmptyJumpTableInRegion(uint32_t jump_table_size,
                                          base::AddressRegion);

+  // Hold the {allocation_mutex_} when calling this method.
+  void PatchJumpTablesLocked(uint32_t func_index, Address target);
+
   // Called by the {WasmCodeAllocator} to register a new code space.
   void AddCodeSpace(base::AddressRegion);