Commit 85ed06e9 authored by Clemens Hammacher, committed by Commit Bot

[Liftoff] Remove code prologue to check for optimized code

This prologue is not needed any more now that we have the jump table.
If optimized code exists, we will not even enter the Liftoff code any
more, but instead jump to the optimized code right away.
This also allows to remove the {WASM_CODE_TABLE_ENTRY} relocation info
kind.

R=mstarzinger@chromium.org

Bug: v8:7758
Change-Id: I0449693d7434088fb264104fe59365d7ca2b74c6
Reviewed-on: https://chromium-review.googlesource.com/1110222
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53954}
parent d817090f
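
Before the diff, here is a minimal standalone sketch of the two dispatch schemes this commit is about: the old per-function prologue that checked a code-table cell and jumped away if newer code had been installed, versus redirection through a jump-table slot that is simply patched on tier-up. This is illustrative C++ only, not V8 code; every identifier in it (code_table_entry, jump_table_slot, liftoff_add_with_prologue, ...) is invented for the example.

// Illustrative sketch only (not V8 code); all names are invented.
#include <cstdio>

using WasmFn = int (*)(int, int);

int optimized_add(int a, int b) { return a + b; }  // stands in for TurboFan code

// Old scheme: a code table cell records the current code for the function, and
// the Liftoff-generated code starts with a prologue that compares itself
// against that cell and redirects if a newer version was installed.
static WasmFn code_table_entry = nullptr;

int liftoff_add_with_prologue(int a, int b) {
  if (code_table_entry != liftoff_add_with_prologue) {
    return code_table_entry(a, b);  // jump to the tiered-up code
  }
  return a + b;  // Liftoff body
}

// New scheme: every call goes through a jump-table slot; tier-up patches the
// slot, so the Liftoff body needs no prologue at all.
static WasmFn jump_table_slot = nullptr;

int liftoff_add_plain(int a, int b) { return a + b; }
int call_add(int a, int b) { return jump_table_slot(a, b); }

int main() {
  // Old scheme: the prologue notices the updated code table entry.
  code_table_entry = liftoff_add_with_prologue;
  std::printf("%d\n", liftoff_add_with_prologue(11, 7));  // Liftoff body runs
  code_table_entry = optimized_add;                       // "tier-up"
  std::printf("%d\n", liftoff_add_with_prologue(11, 7));  // redirected

  // New scheme: patching the jump-table slot is enough; the Liftoff code is
  // not entered again once optimized code exists.
  jump_table_slot = liftoff_add_plain;
  std::printf("%d\n", call_add(11, 7));  // Liftoff body runs
  jump_table_slot = optimized_add;       // "tier-up"
  std::printf("%d\n", call_add(11, 7));  // optimized code runs
  return 0;
}

In both variants a call ends up in the optimized code once tier-up has happened; the difference is that with the jump table the Liftoff body carries no check at all, which is what lets this commit delete the prologue and the WASM_CODE_TABLE_ENTRY reloc mode.
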
@@ -143,13 +143,6 @@ Address RelocInfo::target_internal_reference_address() {
   return pc_;
 }
 
-void RelocInfo::set_wasm_code_table_entry(Address target,
-                                          ICacheFlushMode icache_flush_mode) {
-  DCHECK(rmode_ == RelocInfo::WASM_CODE_TABLE_ENTRY);
-  Assembler::set_target_address_at(pc_, constant_pool_, target,
-                                   icache_flush_mode);
-}
-
 Address RelocInfo::target_runtime_entry(Assembler* origin) {
   DCHECK(IsRuntimeEntry(rmode_));
   return target_address();
@@ -743,13 +743,6 @@ Address RelocInfo::target_internal_reference_address() {
   return pc_;
 }
 
-void RelocInfo::set_wasm_code_table_entry(Address target,
-                                          ICacheFlushMode icache_flush_mode) {
-  DCHECK(rmode_ == RelocInfo::WASM_CODE_TABLE_ENTRY);
-  Assembler::set_target_address_at(pc_, constant_pool_, target,
-                                   icache_flush_mode);
-}
-
 Address RelocInfo::target_runtime_entry(Assembler* origin) {
   DCHECK(IsRuntimeEntry(rmode_));
   return origin->runtime_entry_at(pc_);
@@ -564,8 +564,6 @@ const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
       return "internal wasm call";
     case WASM_STUB_CALL:
       return "wasm stub call";
-    case WASM_CODE_TABLE_ENTRY:
-      return "wasm code table entry";
     case JS_TO_WASM_CALL:
       return "js to wasm call";
     case NUMBER_OF_MODES:
@@ -664,7 +662,6 @@ void RelocInfo::Verify(Isolate* isolate) {
     case WASM_CALL:
     case WASM_STUB_CALL:
     case JS_TO_WASM_CALL:
-    case WASM_CODE_TABLE_ENTRY:
     case NONE:
       break;
     case NUMBER_OF_MODES:
@@ -461,10 +461,6 @@ class RelocInfo {
     // cannot be encoded as part of another record.
     PC_JUMP,
 
-    // Points to a wasm code table entry.
-    // TODO(clemensh): Remove this once we have the jump table (issue 7758).
-    WASM_CODE_TABLE_ENTRY,
-
     // Pseudo-types
     NUMBER_OF_MODES,
     NONE,  // never recorded value
@@ -619,8 +615,6 @@ class RelocInfo {
   INLINE(void set_target_cell(
       Cell* cell, WriteBarrierMode write_barrier_mode = UPDATE_WRITE_BARRIER,
       ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED));
-  INLINE(void set_wasm_code_table_entry(
-      Address, ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED));
   INLINE(void set_target_external_reference(
       Address, ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED));
@@ -137,15 +137,6 @@ Address RelocInfo::target_internal_reference_address() {
   return pc_;
 }
 
-void RelocInfo::set_wasm_code_table_entry(Address target,
-                                          ICacheFlushMode icache_flush_mode) {
-  DCHECK(rmode_ == RelocInfo::WASM_CODE_TABLE_ENTRY);
-  Memory::Address_at(pc_) = target;
-  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
-    Assembler::FlushICache(pc_, sizeof(Address));
-  }
-}
-
 Address RelocInfo::target_runtime_entry(Assembler* origin) {
   DCHECK(IsRuntimeEntry(rmode_));
   return static_cast<Address>(*reinterpret_cast<int32_t*>(pc_));
@@ -250,13 +250,6 @@ Address RelocInfo::target_internal_reference_address() {
   return pc_;
 }
 
-void RelocInfo::set_wasm_code_table_entry(Address target,
-                                          ICacheFlushMode icache_flush_mode) {
-  DCHECK(rmode_ == RelocInfo::WASM_CODE_TABLE_ENTRY);
-  Assembler::set_target_address_at(pc_, constant_pool_, target,
-                                   icache_flush_mode);
-}
-
 Address RelocInfo::target_runtime_entry(Assembler* origin) {
   DCHECK(IsRuntimeEntry(rmode_));
   return target_address();
@@ -212,13 +212,6 @@ Address RelocInfo::target_internal_reference_address() {
   return pc_;
 }
 
-void RelocInfo::set_wasm_code_table_entry(Address target,
-                                          ICacheFlushMode icache_flush_mode) {
-  DCHECK(rmode_ == RelocInfo::WASM_CODE_TABLE_ENTRY);
-  Assembler::set_target_address_at(pc_, constant_pool_, target,
-                                   icache_flush_mode);
-}
-
 Address RelocInfo::target_runtime_entry(Assembler* origin) {
   DCHECK(IsRuntimeEntry(rmode_));
   return target_address();
@@ -199,13 +199,6 @@ void RelocInfo::set_target_external_reference(
                                    icache_flush_mode);
 }
 
-void RelocInfo::set_wasm_code_table_entry(Address target,
-                                          ICacheFlushMode icache_flush_mode) {
-  DCHECK(rmode_ == RelocInfo::WASM_CODE_TABLE_ENTRY);
-  Assembler::set_target_address_at(pc_, constant_pool_, target,
-                                   icache_flush_mode);
-}
-
 Address RelocInfo::target_runtime_entry(Assembler* origin) {
   DCHECK(IsRuntimeEntry(rmode_));
   return target_address();
@@ -181,13 +181,6 @@ void RelocInfo::set_target_external_reference(
                                    icache_flush_mode);
 }
 
-void RelocInfo::set_wasm_code_table_entry(Address target,
-                                          ICacheFlushMode icache_flush_mode) {
-  DCHECK(rmode_ == RelocInfo::WASM_CODE_TABLE_ENTRY);
-  Assembler::set_target_address_at(pc_, constant_pool_, target,
-                                   icache_flush_mode);
-}
-
 Address RelocInfo::target_runtime_entry(Assembler* origin) {
   DCHECK(IsRuntimeEntry(rmode_));
   return target_address();
@@ -136,7 +136,7 @@ class LiftoffCompiler {
                   SourcePositionTableBuilder* source_position_table_builder,
                   std::vector<trap_handler::ProtectedInstructionData>*
                       protected_instructions,
-                  Zone* compilation_zone, WasmCode* const* code_table_entry)
+                  Zone* compilation_zone)
       : asm_(liftoff_asm),
         descriptor_(
             GetLoweredCallDescriptor(compilation_zone, call_descriptor)),
@@ -149,8 +149,7 @@ class LiftoffCompiler {
         source_position_table_builder_(source_position_table_builder),
         protected_instructions_(protected_instructions),
        compilation_zone_(compilation_zone),
-        safepoint_table_builder_(compilation_zone_),
-        code_table_entry_(code_table_entry) {}
+        safepoint_table_builder_(compilation_zone_) {}
 
   ~LiftoffCompiler() { BindUnboundLabels(nullptr); }
@@ -295,61 +294,6 @@ class LiftoffCompiler {
     __ bind(ool.continuation.get());
   }
 
-  // Inserts a check whether the optimized version of this code already exists.
-  // If so, it redirects execution to the optimized code.
-  void JumpToOptimizedCodeIfExisting(LiftoffRegList param_regs) {
-    // We need one register to keep the address of the optimized
-    // code that is not used to keep parameters.
-    LiftoffRegister address_tmp = LiftoffRegister(kNoParamRegister);
-    DCHECK(!param_regs.has(address_tmp));
-
-    LiftoffRegList available_regs = kGpCacheRegList & ~param_regs;
-    // We already use the {address_tmp} later, so remove it too.
-    available_regs.clear(address_tmp);
-
-    // We require one general purpose register.
-    if (available_regs.is_empty()) {
-      LiftoffRegList taken_gp_regs = kGpCacheRegList & param_regs;
-      LiftoffRegister reg = taken_gp_regs.GetFirstRegSet();
-      available_regs.set(reg);
-    }
-
-    LiftoffRegister tmp = available_regs.GetFirstRegSet();
-    if (param_regs.has(tmp)) __ PushRegisters(LiftoffRegList::ForRegs(tmp));
-
-    static LoadType kPointerLoadType =
-        LoadType::ForValueType(LiftoffAssembler::kWasmIntPtr);
-    using int_t = std::conditional<kPointerSize == 8, uint64_t, uint32_t>::type;
-    static_assert(sizeof(int_t) == sizeof(uintptr_t), "weird uintptr_t");
-    // Get the address of the WasmCode* currently stored in the code table.
-    __ LoadConstant(address_tmp,
-                    WasmValue(reinterpret_cast<int_t>(code_table_entry_)),
-                    RelocInfo::WASM_CODE_TABLE_ENTRY);
-    // Load the corresponding WasmCode*.
-    LiftoffRegister wasm_code_address = tmp;
-    __ Load(wasm_code_address, address_tmp.gp(), Register::no_reg(), 0,
-            kPointerLoadType, param_regs);
-    // Load its target address ({instuctions_.start()}).
-    __ Load(address_tmp, wasm_code_address.gp(), Register::no_reg(),
-            WasmCode::kInstructionStartOffset, kPointerLoadType, param_regs);
-    // Get the current code's target address ({instructions_.start()}).
-    LiftoffRegister code_start_address = tmp;
-    __ ComputeCodeStartAddress(code_start_address.gp());
-
-    // If the current code's target address is the same as the
-    // target address of the stored WasmCode, then continue executing, otherwise
-    // jump to the updated WasmCode.
-    Label cont;
-    __ emit_cond_jump(kEqual, &cont, LiftoffAssembler::kWasmIntPtr,
-                      address_tmp.gp(), code_start_address.gp());
-
-    if (param_regs.has(tmp)) __ PopRegisters(LiftoffRegList::ForRegs(tmp));
-    __ emit_jump(address_tmp.gp());
-
-    __ bind(&cont);
-    if (param_regs.has(tmp)) __ PopRegisters(LiftoffRegList::ForRegs(tmp));
-  }
-
   void StartFunctionBody(Decoder* decoder, Control* block) {
     for (uint32_t i = 0; i < __ num_locals(); ++i) {
       if (!CheckSupportedType(decoder, kTypes_ilfd, __ local_type(i), "param"))
@@ -370,21 +314,6 @@ class LiftoffCompiler {
     uint32_t num_params =
         static_cast<uint32_t>(decoder->sig_->parameter_count());
 
-    if (FLAG_wasm_tier_up) {
-      if (!kNoParamRegister.is_valid()) {
-        unsupported(decoder, "Please define kNoParamRegister.");
-        return;
-      }
-
-      // Collect all registers that are allocated on function entry.
-      LiftoffRegList param_regs;
-      param_regs.set(instance_reg);
-
-      CollectReservedRegsForParameters(kInstanceParameterIndex + 1, num_params,
-                                       param_regs);
-      JumpToOptimizedCodeIfExisting(param_regs);
-    }
-
     __ EnterFrame(StackFrame::WASM_COMPILED);
     __ set_has_frame(true);
     pc_offset_stack_frame_construction_ = __ PrepareStackFrame();
@@ -1864,10 +1793,6 @@ class LiftoffCompiler {
   // patch the actually needed stack size in the end.
   uint32_t pc_offset_stack_frame_construction_ = 0;
 
-  // Points to the cell within the {code_table_} of the NativeModule,
-  // which corresponds to the currently compiled function
-  WasmCode* const* code_table_entry_ = nullptr;
-
   void TraceCacheState(Decoder* decoder) const {
 #ifdef DEBUG
     if (!FLAG_trace_liftoff || !FLAG_trace_wasm_decoder) return;
@@ -1905,18 +1830,13 @@ bool LiftoffCompilationUnit::ExecuteCompilation() {
       compiler::GetWasmCallDescriptor(&zone, wasm_unit_->func_body_.sig);
   base::Optional<TimedHistogramScope> liftoff_compile_time_scope(
       base::in_place, wasm_unit_->counters_->liftoff_compile_time());
-  // TODO(clemensh): Remove this once we have the jump table (issue 7758).
-  wasm::WasmCode** code_table_entry =
-      &wasm_unit_->native_module_
-           ->code_table()[wasm_unit_->func_index_ -
-                          wasm_unit_->native_module_->num_imported_functions()];
   DCHECK(!protected_instructions_);
   protected_instructions_.reset(
       new std::vector<trap_handler::ProtectedInstructionData>());
   wasm::WasmFullDecoder<wasm::Decoder::kValidate, wasm::LiftoffCompiler>
       decoder(&zone, module, wasm_unit_->func_body_, &asm_, call_descriptor,
               wasm_unit_->env_, &source_position_table_builder_,
-              protected_instructions_.get(), &zone, code_table_entry);
+              protected_instructions_.get(), &zone);
   decoder.Decode();
   liftoff_compile_time_scope.reset();
   if (!decoder.interface().ok()) {
@@ -189,7 +189,6 @@ void WasmCode::Validate() const {
       CHECK(contains(target));
       break;
     }
-    case RelocInfo::WASM_CODE_TABLE_ENTRY:
     case RelocInfo::WASM_CALL:
     case RelocInfo::JS_TO_WASM_CALL:
     case RelocInfo::EXTERNAL_REFERENCE:
@@ -336,12 +335,6 @@ void NativeModule::SetNumFunctionsForTesting(uint32_t num_functions) {
   num_functions_ = num_functions;
 }
 
-void NativeModule::SetCodeForTesting(uint32_t index, WasmCode* code) {
-  DCHECK_LT(index, num_functions_);
-  DCHECK_LE(num_imported_functions_, index);
-  code_table_[index - num_imported_functions_] = code;
-}
-
 void NativeModule::LogWasmCodes(Isolate* isolate) {
   if (!wasm::WasmCode::ShouldBeLogged(isolate)) return;
@@ -306,7 +306,6 @@ class V8_EXPORT_PRIVATE NativeModule final {
   // on the fly, and bypass the instance builder pipeline.
   void ReserveCodeTableForTesting(uint32_t max_functions);
   void SetNumFunctionsForTesting(uint32_t num_functions);
-  void SetCodeForTesting(uint32_t index, WasmCode* code);
   void LogWasmCodes(Isolate* isolate);
@@ -479,8 +479,7 @@ bool NativeModuleDeserializer::ReadCode(uint32_t fn_index, Reader* reader) {
   // Relocate the code.
   int mask = RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL) |
-             RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
-             RelocInfo::ModeMask(RelocInfo::WASM_CODE_TABLE_ENTRY);
+             RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE);
   for (RelocIterator iter(ret->instructions(), ret->reloc_info(),
                           ret->constant_pool(), mask);
        !iter.done(); iter.next()) {
@@ -503,17 +502,6 @@ bool NativeModuleDeserializer::ReadCode(uint32_t fn_index, Reader* reader) {
      iter.rinfo()->set_target_external_reference(address, SKIP_ICACHE_FLUSH);
      break;
    }
-    case RelocInfo::WASM_CODE_TABLE_ENTRY: {
-      DCHECK(FLAG_wasm_tier_up);
-      DCHECK(ret->is_liftoff());
-      uint32_t code_table_index =
-          ret->index() - native_module_->num_imported_functions_;
-      WasmCode** code_table_entry =
-          &native_module_->code_table()[code_table_index];
-      iter.rinfo()->set_wasm_code_table_entry(
-          reinterpret_cast<Address>(code_table_entry), SKIP_ICACHE_FLUSH);
-      break;
-    }
    default:
      UNREACHABLE();
  }
@@ -339,15 +339,6 @@ Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
   }
 }
 
-void RelocInfo::set_wasm_code_table_entry(Address target,
-                                          ICacheFlushMode icache_flush_mode) {
-  DCHECK(rmode_ == RelocInfo::WASM_CODE_TABLE_ENTRY);
-  Memory::Address_at(pc_) = target;
-  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
-    Assembler::FlushICache(pc_, sizeof(Address));
-  }
-}
-
 Address RelocInfo::target_external_reference() {
   DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
   return Memory::Address_at(pc_);
@@ -3407,66 +3407,47 @@ WASM_EXEC_TEST(I64RemUOnDifferentRegisters) {
   });
 }
 
-TEST(Liftoff_prologue) {
-  // The tested prologue is only inserted in tiering mode. The prologue
-  // is responsible for jumping to the optimized, tiered up code if
-  // it exists.
-  FlagScope<bool> tier_up_scope(&v8::internal::FLAG_wasm_tier_up, true);
-
-  // The number of parameters define how many registers are used
-  // on a function call. The Liftoff-prologue has to make sure to
-  // correctly save prior, and restore all parameters
-  // after the prologue.
-  const uint8_t kNumParams = 4;
-  ValueType int_types[kNumParams + 1];
-  for (int i = 0; i < kNumParams + 1; i++) int_types[i] = kWasmI32;
-  FunctionSig sig_i_x(1, kNumParams, int_types);
-
-  WasmRunner<int32_t, int32_t, int32_t, int32_t, int32_t> r(
-      WasmExecutionMode::kExecuteLiftoff);
-
-  // Define two functions: {add_locals} and {sub_locals}, whereas
-  // {sub_locals} shall be our mockup optimized code.
-  std::vector<byte> add_locals, sub_locals;
-  ADD_CODE(add_locals, WASM_I32_ADD(WASM_GET_LOCAL(0), WASM_GET_LOCAL(1)));
-  ADD_CODE(sub_locals, WASM_I32_SUB(WASM_GET_LOCAL(0), WASM_GET_LOCAL(1)));
-  for (int i = 2; i < kNumParams; ++i) {
-    ADD_CODE(add_locals, WASM_GET_LOCAL(i), kExprI32Add);
-    ADD_CODE(sub_locals, WASM_GET_LOCAL(i), kExprI32Sub);
-  }
-
-  WasmFunctionCompiler& add_compiler = r.NewFunction(&sig_i_x);
-  add_compiler.Build(&add_locals[0], &add_locals[0] + add_locals.size());
-  WasmFunctionCompiler& sub_compiler = r.NewFunction(&sig_i_x);
-  sub_compiler.Build(&sub_locals[0], &sub_locals[0] + sub_locals.size());
-
-  // Create a calling function, which shall call {add_locals}.
-  std::vector<byte> call;
-  for (int i = 0; i < kNumParams; ++i) {
-    ADD_CODE(call, WASM_GET_LOCAL(i));
-  }
-  ADD_CODE(call, kExprCallFunction,
-           static_cast<byte>(add_compiler.function_index()));
-  r.Build(&call[0], &call[0] + call.size());
-
+TEST(Liftoff_tier_up) {
+  WasmRunner<int32_t, int32_t, int32_t> r(WasmExecutionMode::kExecuteLiftoff);
+
+  WasmFunctionCompiler& add = r.NewFunction<int32_t, int32_t, int32_t>("add");
+  BUILD(add, WASM_I32_ADD(WASM_GET_LOCAL(0), WASM_GET_LOCAL(1)));
+
+  WasmFunctionCompiler& sub = r.NewFunction<int32_t, int32_t, int32_t>("sub");
+  BUILD(sub, WASM_I32_SUB(WASM_GET_LOCAL(0), WASM_GET_LOCAL(1)));
+
+  // Create the main function, which shall call {add}.
+  BUILD(r, WASM_CALL_FUNCTION(add.function_index(), WASM_GET_LOCAL(0),
+                              WASM_GET_LOCAL(1)));
+
   NativeModule* native_module =
       r.builder().instance_object()->module_object()->native_module();
 
   // This test only works if we managed to compile with Liftoff.
-  if (native_module->code(add_compiler.function_index())->is_liftoff()) {
-    // First run should execute {add_locals}.
-    CHECK_EQ(10, r.Call(1, 2, 3, 4));
-
-    // Update the native_module to contain the "optimized" code ({sub_locals}).
-    native_module->SetCodeForTesting(
-        add_compiler.function_index(),
-        native_module->code(sub_compiler.function_index()));
-
-    // Second run should execute {add_locals}, which should detect that
-    // the code was updated, and run {sub_locals}.
-    CHECK_EQ(-8, r.Call(1, 2, 3, 4));
+  if (native_module->code(add.function_index())->is_liftoff()) {
+    // First run should execute {add}.
+    CHECK_EQ(18, r.Call(11, 7));
+
+    // Now make a copy of the {sub} function, and add it to the native module at
+    // the index of {add}.
+    CodeDesc desc;
+    memset(&desc, 0, sizeof(CodeDesc));
+    WasmCode* sub_code = native_module->code(sub.function_index());
+    size_t sub_size = sub_code->instructions().size();
+    std::unique_ptr<byte[]> buffer(new byte[sub_code->instructions().size()]);
+    memcpy(buffer.get(), sub_code->instructions().start(), sub_size);
+    desc.buffer = buffer.get();
+    desc.instr_size = static_cast<int>(sub_size);
+    Isolate* isolate = CcTest::InitIsolateOnce();
+    Handle<ByteArray> source_pos = isolate->factory()->empty_byte_array();
+    std::unique_ptr<ProtectedInstructions> protected_instructions(
+        new ProtectedInstructions(sub_code->protected_instructions()));
+    native_module->AddCode(desc, 0, add.function_index(), 0, 0,
+                           std::move(protected_instructions), source_pos,
+                           WasmCode::kOther);
+
+    // Second run should now execute {sub}.
+    CHECK_EQ(4, r.Call(11, 7));
   }
 }