Commit 85ed06e9 authored by Clemens Hammacher, committed by Commit Bot

[Liftoff] Remove code prologue to check for optimized code

This prologue is not needed any more now that we have the jump table.
If optimized code exists, we will not even enter the Liftoff code any
more, but instead jump to the optimized code right away.
This also allows us to remove the {WASM_CODE_TABLE_ENTRY} relocation info
kind.

R=mstarzinger@chromium.org

Bug: v8:7758
Change-Id: I0449693d7434088fb264104fe59365d7ca2b74c6
Reviewed-on: https://chromium-review.googlesource.com/1110222
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53954}
parent d817090f
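
Note on the jump-table design referenced in the commit message: every wasm-to-wasm call is routed through a per-function jump-table slot rather than directly into a particular code object, so tiering up only patches that slot and the Liftoff code itself no longer needs an entry check. The following standalone C++ sketch illustrates just that idea; plain function pointers stand in for V8's generated code objects and jump-table slots, and none of these names exist in V8 itself.

#include <cstdio>

// Hypothetical stand-ins: in V8 these would be generated machine code,
// not ordinary C++ functions.
using WasmCodePtr = int (*)(int, int);

int liftoff_add(int a, int b) { return a + b; }   // baseline (Liftoff) code
int turbofan_add(int a, int b) { return a + b; }  // tiered-up code

// One slot per function; call sites always target the slot, never a code
// object directly.
static WasmCodePtr jump_table_slot = liftoff_add;

int call_wasm_add(int a, int b) {
  // No prologue check in the callee: redirection happens at the call site.
  return jump_table_slot(a, b);
}

int main() {
  std::printf("%d\n", call_wasm_add(11, 7));  // executes the baseline version
  jump_table_slot = turbofan_add;             // tier-up patches the slot
  std::printf("%d\n", call_wasm_add(11, 7));  // now executes the optimized version
  return 0;
}
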
...@@ -143,13 +143,6 @@ Address RelocInfo::target_internal_reference_address() { ...@@ -143,13 +143,6 @@ Address RelocInfo::target_internal_reference_address() {
return pc_; return pc_;
} }
void RelocInfo::set_wasm_code_table_entry(Address target,
ICacheFlushMode icache_flush_mode) {
DCHECK(rmode_ == RelocInfo::WASM_CODE_TABLE_ENTRY);
Assembler::set_target_address_at(pc_, constant_pool_, target,
icache_flush_mode);
}
Address RelocInfo::target_runtime_entry(Assembler* origin) { Address RelocInfo::target_runtime_entry(Assembler* origin) {
DCHECK(IsRuntimeEntry(rmode_)); DCHECK(IsRuntimeEntry(rmode_));
return target_address(); return target_address();
......
@@ -743,13 +743,6 @@ Address RelocInfo::target_internal_reference_address() {
   return pc_;
 }
-void RelocInfo::set_wasm_code_table_entry(Address target,
-                                          ICacheFlushMode icache_flush_mode) {
-  DCHECK(rmode_ == RelocInfo::WASM_CODE_TABLE_ENTRY);
-  Assembler::set_target_address_at(pc_, constant_pool_, target,
-                                   icache_flush_mode);
-}
 Address RelocInfo::target_runtime_entry(Assembler* origin) {
   DCHECK(IsRuntimeEntry(rmode_));
   return origin->runtime_entry_at(pc_);
...
@@ -564,8 +564,6 @@ const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
       return "internal wasm call";
     case WASM_STUB_CALL:
       return "wasm stub call";
-    case WASM_CODE_TABLE_ENTRY:
-      return "wasm code table entry";
     case JS_TO_WASM_CALL:
       return "js to wasm call";
     case NUMBER_OF_MODES:
@@ -664,7 +662,6 @@ void RelocInfo::Verify(Isolate* isolate) {
     case WASM_CALL:
     case WASM_STUB_CALL:
     case JS_TO_WASM_CALL:
-    case WASM_CODE_TABLE_ENTRY:
     case NONE:
       break;
     case NUMBER_OF_MODES:
...
@@ -461,10 +461,6 @@ class RelocInfo {
     // cannot be encoded as part of another record.
     PC_JUMP,
-    // Points to a wasm code table entry.
-    // TODO(clemensh): Remove this once we have the jump table (issue 7758).
-    WASM_CODE_TABLE_ENTRY,
     // Pseudo-types
     NUMBER_OF_MODES,
     NONE,  // never recorded value
@@ -619,8 +615,6 @@
   INLINE(void set_target_cell(
       Cell* cell, WriteBarrierMode write_barrier_mode = UPDATE_WRITE_BARRIER,
       ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED));
-  INLINE(void set_wasm_code_table_entry(
-      Address, ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED));
   INLINE(void set_target_external_reference(
       Address, ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED));
...
@@ -137,15 +137,6 @@ Address RelocInfo::target_internal_reference_address() {
   return pc_;
 }
-void RelocInfo::set_wasm_code_table_entry(Address target,
-                                          ICacheFlushMode icache_flush_mode) {
-  DCHECK(rmode_ == RelocInfo::WASM_CODE_TABLE_ENTRY);
-  Memory::Address_at(pc_) = target;
-  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
-    Assembler::FlushICache(pc_, sizeof(Address));
-  }
-}
 Address RelocInfo::target_runtime_entry(Assembler* origin) {
   DCHECK(IsRuntimeEntry(rmode_));
   return static_cast<Address>(*reinterpret_cast<int32_t*>(pc_));
...
@@ -250,13 +250,6 @@ Address RelocInfo::target_internal_reference_address() {
   return pc_;
 }
-void RelocInfo::set_wasm_code_table_entry(Address target,
-                                          ICacheFlushMode icache_flush_mode) {
-  DCHECK(rmode_ == RelocInfo::WASM_CODE_TABLE_ENTRY);
-  Assembler::set_target_address_at(pc_, constant_pool_, target,
-                                   icache_flush_mode);
-}
 Address RelocInfo::target_runtime_entry(Assembler* origin) {
   DCHECK(IsRuntimeEntry(rmode_));
   return target_address();
...
@@ -212,13 +212,6 @@ Address RelocInfo::target_internal_reference_address() {
   return pc_;
 }
-void RelocInfo::set_wasm_code_table_entry(Address target,
-                                          ICacheFlushMode icache_flush_mode) {
-  DCHECK(rmode_ == RelocInfo::WASM_CODE_TABLE_ENTRY);
-  Assembler::set_target_address_at(pc_, constant_pool_, target,
-                                   icache_flush_mode);
-}
 Address RelocInfo::target_runtime_entry(Assembler* origin) {
   DCHECK(IsRuntimeEntry(rmode_));
   return target_address();
...
@@ -199,13 +199,6 @@ void RelocInfo::set_target_external_reference(
                                    icache_flush_mode);
 }
-void RelocInfo::set_wasm_code_table_entry(Address target,
-                                          ICacheFlushMode icache_flush_mode) {
-  DCHECK(rmode_ == RelocInfo::WASM_CODE_TABLE_ENTRY);
-  Assembler::set_target_address_at(pc_, constant_pool_, target,
-                                   icache_flush_mode);
-}
 Address RelocInfo::target_runtime_entry(Assembler* origin) {
   DCHECK(IsRuntimeEntry(rmode_));
   return target_address();
...
@@ -181,13 +181,6 @@ void RelocInfo::set_target_external_reference(
                                    icache_flush_mode);
 }
-void RelocInfo::set_wasm_code_table_entry(Address target,
-                                          ICacheFlushMode icache_flush_mode) {
-  DCHECK(rmode_ == RelocInfo::WASM_CODE_TABLE_ENTRY);
-  Assembler::set_target_address_at(pc_, constant_pool_, target,
-                                   icache_flush_mode);
-}
 Address RelocInfo::target_runtime_entry(Assembler* origin) {
   DCHECK(IsRuntimeEntry(rmode_));
   return target_address();
...
@@ -136,7 +136,7 @@ class LiftoffCompiler {
                   SourcePositionTableBuilder* source_position_table_builder,
                   std::vector<trap_handler::ProtectedInstructionData>*
                       protected_instructions,
-                  Zone* compilation_zone, WasmCode* const* code_table_entry)
+                  Zone* compilation_zone)
       : asm_(liftoff_asm),
         descriptor_(
             GetLoweredCallDescriptor(compilation_zone, call_descriptor)),
@@ -149,8 +149,7 @@
         source_position_table_builder_(source_position_table_builder),
         protected_instructions_(protected_instructions),
         compilation_zone_(compilation_zone),
-        safepoint_table_builder_(compilation_zone_),
-        code_table_entry_(code_table_entry) {}
+        safepoint_table_builder_(compilation_zone_) {}
   ~LiftoffCompiler() { BindUnboundLabels(nullptr); }
@@ -295,61 +294,6 @@
     __ bind(ool.continuation.get());
   }
-  // Inserts a check whether the optimized version of this code already exists.
-  // If so, it redirects execution to the optimized code.
-  void JumpToOptimizedCodeIfExisting(LiftoffRegList param_regs) {
-    // We need one register to keep the address of the optimized
-    // code that is not used to keep parameters.
-    LiftoffRegister address_tmp = LiftoffRegister(kNoParamRegister);
-    DCHECK(!param_regs.has(address_tmp));
-    LiftoffRegList available_regs = kGpCacheRegList & ~param_regs;
-    // We already use the {address_tmp} later, so remove it too.
-    available_regs.clear(address_tmp);
-    // We require one general purpose register.
-    if (available_regs.is_empty()) {
-      LiftoffRegList taken_gp_regs = kGpCacheRegList & param_regs;
-      LiftoffRegister reg = taken_gp_regs.GetFirstRegSet();
-      available_regs.set(reg);
-    }
-    LiftoffRegister tmp = available_regs.GetFirstRegSet();
-    if (param_regs.has(tmp)) __ PushRegisters(LiftoffRegList::ForRegs(tmp));
-    static LoadType kPointerLoadType =
-        LoadType::ForValueType(LiftoffAssembler::kWasmIntPtr);
-    using int_t = std::conditional<kPointerSize == 8, uint64_t, uint32_t>::type;
-    static_assert(sizeof(int_t) == sizeof(uintptr_t), "weird uintptr_t");
-    // Get the address of the WasmCode* currently stored in the code table.
-    __ LoadConstant(address_tmp,
-                    WasmValue(reinterpret_cast<int_t>(code_table_entry_)),
-                    RelocInfo::WASM_CODE_TABLE_ENTRY);
-    // Load the corresponding WasmCode*.
-    LiftoffRegister wasm_code_address = tmp;
-    __ Load(wasm_code_address, address_tmp.gp(), Register::no_reg(), 0,
-            kPointerLoadType, param_regs);
-    // Load its target address ({instuctions_.start()}).
-    __ Load(address_tmp, wasm_code_address.gp(), Register::no_reg(),
-            WasmCode::kInstructionStartOffset, kPointerLoadType, param_regs);
-    // Get the current code's target address ({instructions_.start()}).
-    LiftoffRegister code_start_address = tmp;
-    __ ComputeCodeStartAddress(code_start_address.gp());
-    // If the current code's target address is the same as the
-    // target address of the stored WasmCode, then continue executing, otherwise
-    // jump to the updated WasmCode.
-    Label cont;
-    __ emit_cond_jump(kEqual, &cont, LiftoffAssembler::kWasmIntPtr,
-                      address_tmp.gp(), code_start_address.gp());
-    if (param_regs.has(tmp)) __ PopRegisters(LiftoffRegList::ForRegs(tmp));
-    __ emit_jump(address_tmp.gp());
-    __ bind(&cont);
-    if (param_regs.has(tmp)) __ PopRegisters(LiftoffRegList::ForRegs(tmp));
-  }
   void StartFunctionBody(Decoder* decoder, Control* block) {
     for (uint32_t i = 0; i < __ num_locals(); ++i) {
       if (!CheckSupportedType(decoder, kTypes_ilfd, __ local_type(i), "param"))
@@ -370,21 +314,6 @@
     uint32_t num_params =
         static_cast<uint32_t>(decoder->sig_->parameter_count());
-    if (FLAG_wasm_tier_up) {
-      if (!kNoParamRegister.is_valid()) {
-        unsupported(decoder, "Please define kNoParamRegister.");
-        return;
-      }
-      // Collect all registers that are allocated on function entry.
-      LiftoffRegList param_regs;
-      param_regs.set(instance_reg);
-      CollectReservedRegsForParameters(kInstanceParameterIndex + 1, num_params,
-                                       param_regs);
-      JumpToOptimizedCodeIfExisting(param_regs);
-    }
     __ EnterFrame(StackFrame::WASM_COMPILED);
     __ set_has_frame(true);
     pc_offset_stack_frame_construction_ = __ PrepareStackFrame();
@@ -1864,10 +1793,6 @@
   // patch the actually needed stack size in the end.
   uint32_t pc_offset_stack_frame_construction_ = 0;
-  // Points to the cell within the {code_table_} of the NativeModule,
-  // which corresponds to the currently compiled function
-  WasmCode* const* code_table_entry_ = nullptr;
   void TraceCacheState(Decoder* decoder) const {
 #ifdef DEBUG
     if (!FLAG_trace_liftoff || !FLAG_trace_wasm_decoder) return;
@@ -1905,18 +1830,13 @@ bool LiftoffCompilationUnit::ExecuteCompilation() {
       compiler::GetWasmCallDescriptor(&zone, wasm_unit_->func_body_.sig);
   base::Optional<TimedHistogramScope> liftoff_compile_time_scope(
       base::in_place, wasm_unit_->counters_->liftoff_compile_time());
-  // TODO(clemensh): Remove this once we have the jump table (issue 7758).
-  wasm::WasmCode** code_table_entry =
-      &wasm_unit_->native_module_
-           ->code_table()[wasm_unit_->func_index_ -
-                          wasm_unit_->native_module_->num_imported_functions()];
   DCHECK(!protected_instructions_);
   protected_instructions_.reset(
       new std::vector<trap_handler::ProtectedInstructionData>());
   wasm::WasmFullDecoder<wasm::Decoder::kValidate, wasm::LiftoffCompiler>
       decoder(&zone, module, wasm_unit_->func_body_, &asm_, call_descriptor,
               wasm_unit_->env_, &source_position_table_builder_,
-              protected_instructions_.get(), &zone, code_table_entry);
+              protected_instructions_.get(), &zone);
   decoder.Decode();
   liftoff_compile_time_scope.reset();
   if (!decoder.interface().ok()) {
...
@@ -189,7 +189,6 @@ void WasmCode::Validate() const {
       CHECK(contains(target));
       break;
     }
-    case RelocInfo::WASM_CODE_TABLE_ENTRY:
     case RelocInfo::WASM_CALL:
     case RelocInfo::JS_TO_WASM_CALL:
     case RelocInfo::EXTERNAL_REFERENCE:
@@ -336,12 +335,6 @@ void NativeModule::SetNumFunctionsForTesting(uint32_t num_functions) {
   num_functions_ = num_functions;
 }
-void NativeModule::SetCodeForTesting(uint32_t index, WasmCode* code) {
-  DCHECK_LT(index, num_functions_);
-  DCHECK_LE(num_imported_functions_, index);
-  code_table_[index - num_imported_functions_] = code;
-}
 void NativeModule::LogWasmCodes(Isolate* isolate) {
   if (!wasm::WasmCode::ShouldBeLogged(isolate)) return;
...
@@ -306,7 +306,6 @@ class V8_EXPORT_PRIVATE NativeModule final {
   // on the fly, and bypass the instance builder pipeline.
   void ReserveCodeTableForTesting(uint32_t max_functions);
   void SetNumFunctionsForTesting(uint32_t num_functions);
-  void SetCodeForTesting(uint32_t index, WasmCode* code);
   void LogWasmCodes(Isolate* isolate);
...
@@ -479,8 +479,7 @@ bool NativeModuleDeserializer::ReadCode(uint32_t fn_index, Reader* reader) {
   // Relocate the code.
   int mask = RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL) |
-             RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
-             RelocInfo::ModeMask(RelocInfo::WASM_CODE_TABLE_ENTRY);
+             RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE);
   for (RelocIterator iter(ret->instructions(), ret->reloc_info(),
                           ret->constant_pool(), mask);
        !iter.done(); iter.next()) {
@@ -503,17 +502,6 @@ bool NativeModuleDeserializer::ReadCode(uint32_t fn_index, Reader* reader) {
         iter.rinfo()->set_target_external_reference(address, SKIP_ICACHE_FLUSH);
         break;
       }
-      case RelocInfo::WASM_CODE_TABLE_ENTRY: {
-        DCHECK(FLAG_wasm_tier_up);
-        DCHECK(ret->is_liftoff());
-        uint32_t code_table_index =
-            ret->index() - native_module_->num_imported_functions_;
-        WasmCode** code_table_entry =
-            &native_module_->code_table()[code_table_index];
-        iter.rinfo()->set_wasm_code_table_entry(
-            reinterpret_cast<Address>(code_table_entry), SKIP_ICACHE_FLUSH);
-        break;
-      }
       default:
         UNREACHABLE();
     }
...
@@ -339,15 +339,6 @@ Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
   }
 }
-void RelocInfo::set_wasm_code_table_entry(Address target,
-                                          ICacheFlushMode icache_flush_mode) {
-  DCHECK(rmode_ == RelocInfo::WASM_CODE_TABLE_ENTRY);
-  Memory::Address_at(pc_) = target;
-  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
-    Assembler::FlushICache(pc_, sizeof(Address));
-  }
-}
 Address RelocInfo::target_external_reference() {
   DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
   return Memory::Address_at(pc_);
...
@@ -3407,66 +3407,47 @@ WASM_EXEC_TEST(I64RemUOnDifferentRegisters) {
   });
 }
-TEST(Liftoff_prologue) {
-  // The tested prologue is only inserted in tiering mode. The prologue
-  // is responsible for jumping to the optimized, tiered up code if
-  // it exists.
-  FlagScope<bool> tier_up_scope(&v8::internal::FLAG_wasm_tier_up, true);
-  // The number of parameters define how many registers are used
-  // on a function call. The Liftoff-prologue has to make sure to
-  // correctly save prior, and restore all parameters
-  // after the prologue.
-  const uint8_t kNumParams = 4;
-  ValueType int_types[kNumParams + 1];
-  for (int i = 0; i < kNumParams + 1; i++) int_types[i] = kWasmI32;
-  FunctionSig sig_i_x(1, kNumParams, int_types);
-  WasmRunner<int32_t, int32_t, int32_t, int32_t, int32_t> r(
-      WasmExecutionMode::kExecuteLiftoff);
-  // Define two functions: {add_locals} and {sub_locals}, whereas
-  // {sub_locals} shall be our mockup optimized code.
-  std::vector<byte> add_locals, sub_locals;
-  ADD_CODE(add_locals, WASM_I32_ADD(WASM_GET_LOCAL(0), WASM_GET_LOCAL(1)));
-  ADD_CODE(sub_locals, WASM_I32_SUB(WASM_GET_LOCAL(0), WASM_GET_LOCAL(1)));
-  for (int i = 2; i < kNumParams; ++i) {
-    ADD_CODE(add_locals, WASM_GET_LOCAL(i), kExprI32Add);
-    ADD_CODE(sub_locals, WASM_GET_LOCAL(i), kExprI32Sub);
-  }
-  WasmFunctionCompiler& add_compiler = r.NewFunction(&sig_i_x);
-  add_compiler.Build(&add_locals[0], &add_locals[0] + add_locals.size());
-  WasmFunctionCompiler& sub_compiler = r.NewFunction(&sig_i_x);
-  sub_compiler.Build(&sub_locals[0], &sub_locals[0] + sub_locals.size());
-  // Create a calling function, which shall call {add_locals}.
-  std::vector<byte> call;
-  for (int i = 0; i < kNumParams; ++i) {
-    ADD_CODE(call, WASM_GET_LOCAL(i));
-  }
-  ADD_CODE(call, kExprCallFunction,
-           static_cast<byte>(add_compiler.function_index()));
-  r.Build(&call[0], &call[0] + call.size());
+TEST(Liftoff_tier_up) {
+  WasmRunner<int32_t, int32_t, int32_t> r(WasmExecutionMode::kExecuteLiftoff);
+  WasmFunctionCompiler& add = r.NewFunction<int32_t, int32_t, int32_t>("add");
+  BUILD(add, WASM_I32_ADD(WASM_GET_LOCAL(0), WASM_GET_LOCAL(1)));
+  WasmFunctionCompiler& sub = r.NewFunction<int32_t, int32_t, int32_t>("sub");
+  BUILD(sub, WASM_I32_SUB(WASM_GET_LOCAL(0), WASM_GET_LOCAL(1)));
+  // Create the main function, which shall call {add}.
+  BUILD(r, WASM_CALL_FUNCTION(add.function_index(), WASM_GET_LOCAL(0),
+                              WASM_GET_LOCAL(1)));
   NativeModule* native_module =
       r.builder().instance_object()->module_object()->native_module();
   // This test only works if we managed to compile with Liftoff.
-  if (native_module->code(add_compiler.function_index())->is_liftoff()) {
-    // First run should execute {add_locals}.
-    CHECK_EQ(10, r.Call(1, 2, 3, 4));
-    // Update the native_module to contain the "optimized" code ({sub_locals}).
-    native_module->SetCodeForTesting(
-        add_compiler.function_index(),
-        native_module->code(sub_compiler.function_index()));
-    // Second run should execute {add_locals}, which should detect that
-    // the code was updated, and run {sub_locals}.
-    CHECK_EQ(-8, r.Call(1, 2, 3, 4));
+  if (native_module->code(add.function_index())->is_liftoff()) {
+    // First run should execute {add}.
+    CHECK_EQ(18, r.Call(11, 7));
+    // Now make a copy of the {sub} function, and add it to the native module at
+    // the index of {add}.
+    CodeDesc desc;
+    memset(&desc, 0, sizeof(CodeDesc));
+    WasmCode* sub_code = native_module->code(sub.function_index());
+    size_t sub_size = sub_code->instructions().size();
+    std::unique_ptr<byte[]> buffer(new byte[sub_code->instructions().size()]);
+    memcpy(buffer.get(), sub_code->instructions().start(), sub_size);
+    desc.buffer = buffer.get();
+    desc.instr_size = static_cast<int>(sub_size);
+    Isolate* isolate = CcTest::InitIsolateOnce();
+    Handle<ByteArray> source_pos = isolate->factory()->empty_byte_array();
+    std::unique_ptr<ProtectedInstructions> protected_instructions(
+        new ProtectedInstructions(sub_code->protected_instructions()));
+    native_module->AddCode(desc, 0, add.function_index(), 0, 0,
+                           std::move(protected_instructions), source_pos,
+                           WasmCode::kOther);
+    // Second run should now execute {sub}.
+    CHECK_EQ(4, r.Call(11, 7));
   }
 }
...