Commit 5f603e83 authored by klaasb, committed by Commit bot

[interpreter] Inline Star on dispatch for some bytecodes

For some bytecodes it is beneficial to always look ahead for a Star
bytecode when dispatching to the next bytecode and to perform it inline,
without dispatching to the Star handler.

BUG=v8:4280
LOG=N

Review-Url: https://codereview.chromium.org/2142273003
Cr-Commit-Position: refs/heads/master@{#37904}
parent 3449dbc0
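
For illustration only, here is a minimal, self-contained sketch of the idea in plain C++. It is not V8 code; the opcode names, encoding, and interpreter loop below are all hypothetical. After a handler computes the offset of the next bytecode, it peeks at that bytecode; if it is Star, the accumulator-to-register store is done inline and dispatch continues directly with the bytecode after the Star, saving one dispatch.

// Toy model of Star lookahead (hypothetical names, not V8 API).
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

enum Op : uint8_t { kLdaSmi, kStar, kAdd, kReturn };

int main() {
  // acc = 2; r0 = acc; acc += r0; return acc  (toy encoding: opcode, operand)
  std::vector<uint8_t> code = {kLdaSmi, 2, kStar, 0, kAdd, 0, kReturn};
  int acc = 0;
  int regs[4] = {0};
  std::size_t pc = 0;
  int dispatches = 0;
  for (;;) {
    ++dispatches;  // each loop iteration models one dispatch
    Op op = static_cast<Op>(code[pc]);
    std::size_t next = pc;
    switch (op) {
      case kLdaSmi: acc = code[pc + 1]; next = pc + 2; break;
      case kStar:   regs[code[pc + 1]] = acc; next = pc + 2; break;
      case kAdd:    acc += regs[code[pc + 1]]; next = pc + 2; break;
      case kReturn: std::printf("acc=%d dispatches=%d\n", acc, dispatches);
                    return 0;
    }
    // Star lookahead: if the bytecode we are about to dispatch to is Star,
    // perform the register store here and skip past it, so the Star handler
    // is never dispatched to at all. (The real change only enables this for
    // a whitelist of bytecodes; the toy applies it everywhere but Star.)
    if (op != kStar && next + 1 < code.size() && code[next] == kStar) {
      regs[code[next + 1]] = acc;
      next += 2;  // width of Star plus its register operand
    }
    pc = next;
  }
}

Without the lookahead this toy program takes 4 dispatches; with it, the Star is folded into the LdaSmi handler and it takes 3.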
......@@ -571,6 +571,33 @@ bool Bytecodes::IsRegisterOutputOperandType(OperandType operand_type) {
return false;
}
// static
bool Bytecodes::IsStarLookahead(Bytecode bytecode, OperandScale operand_scale) {
if (operand_scale == OperandScale::kSingle) {
switch (bytecode) {
case Bytecode::kLdaZero:
case Bytecode::kLdaSmi:
case Bytecode::kLdaNull:
case Bytecode::kLdaTheHole:
case Bytecode::kLdaConstant:
case Bytecode::kAdd:
case Bytecode::kSub:
case Bytecode::kMul:
case Bytecode::kAddSmi:
case Bytecode::kSubSmi:
case Bytecode::kInc:
case Bytecode::kDec:
case Bytecode::kTypeOf:
case Bytecode::kCall:
case Bytecode::kNew:
return true;
default:
return false;
}
}
return false;
}
// static
int Bytecodes::GetNumberOfRegistersRepresentedBy(OperandType operand_type) {
switch (operand_type) {
......
......@@ -532,6 +532,10 @@ class Bytecodes final {
// Returns true if |operand_type| represents a register used as an output.
static bool IsRegisterOutputOperandType(OperandType operand_type);
// Returns true if the handler for |bytecode| should look ahead and inline a
// dispatch to a Star bytecode.
static bool IsStarLookahead(Bytecode bytecode, OperandScale operand_scale);
// Returns the number of registers represented by a register operand. For
// instance, a RegPair represents two registers.
static int GetNumberOfRegistersRepresentedBy(OperandType operand_type);
......
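
A hedged usage sketch for the predicate declared above. It assumes the file is built inside the V8 tree so that src/interpreter/bytecodes.h and its dependencies resolve; the sample bytecodes, the Sample struct, and the vi namespace alias are illustrative choices, not part of the change.

// Querying the new predicate (assumes a V8 build environment).
#include <cstdio>
#include "src/interpreter/bytecodes.h"

namespace vi = v8::internal::interpreter;

int main() {
  struct Sample { vi::Bytecode bytecode; const char* name; };
  const Sample samples[] = {{vi::Bytecode::kLdaSmi, "LdaSmi"},
                            {vi::Bytecode::kAdd, "Add"},
                            {vi::Bytecode::kJump, "Jump"},
                            {vi::Bytecode::kStar, "Star"}};
  for (const Sample& s : samples) {
    bool lookahead =
        vi::Bytecodes::IsStarLookahead(s.bytecode, vi::OperandScale::kSingle);
    std::printf("%s: %s\n", s.name,
                lookahead ? "inlines a following Star" : "normal dispatch");
  }
  return 0;
}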
......@@ -31,6 +31,7 @@ InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone,
Bytecodes::ReturnCount(bytecode)),
bytecode_(bytecode),
operand_scale_(operand_scale),
bytecode_offset_(this, MachineType::PointerRepresentation()),
interpreted_frame_pointer_(this, MachineType::PointerRepresentation()),
accumulator_(this, MachineRepresentation::kTagged),
accumulator_use_(AccumulatorUse::kNone),
......@@ -39,6 +40,8 @@ InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone,
stack_pointer_before_call_(nullptr) {
accumulator_.Bind(
Parameter(InterpreterDispatchDescriptor::kAccumulatorParameter));
bytecode_offset_.Bind(
Parameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter));
if (FLAG_trace_ignition) {
TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
}
......@@ -83,7 +86,7 @@ void InterpreterAssembler::SetContext(Node* value) {
}
Node* InterpreterAssembler::BytecodeOffset() {
return Parameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter);
return bytecode_offset_.value();
}
Node* InterpreterAssembler::BytecodeArrayTaggedPointer() {
......@@ -659,17 +662,30 @@ void InterpreterAssembler::UpdateInterruptBudget(Node* weight) {
new_budget.value());
}
Node* InterpreterAssembler::Advance() {
return Advance(Bytecodes::Size(bytecode_, operand_scale_));
}
Node* InterpreterAssembler::Advance(int delta) {
return IntPtrAdd(BytecodeOffset(), IntPtrConstant(delta));
return Advance(IntPtrConstant(delta));
}
Node* InterpreterAssembler::Advance(Node* delta) {
return IntPtrAdd(BytecodeOffset(), delta);
if (FLAG_trace_ignition) {
TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
}
Node* next_offset = IntPtrAdd(BytecodeOffset(), delta);
bytecode_offset_.Bind(next_offset);
return next_offset;
}
Node* InterpreterAssembler::Jump(Node* delta) {
DCHECK(!Bytecodes::IsStarLookahead(bytecode_, operand_scale_));
UpdateInterruptBudget(delta);
return DispatchTo(Advance(delta));
Node* new_bytecode_offset = Advance(delta);
Node* target_bytecode = LoadBytecode(new_bytecode_offset);
return DispatchToBytecode(target_bytecode, new_bytecode_offset);
}
void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) {
......@@ -691,17 +707,66 @@ void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs,
JumpConditional(WordNotEqual(lhs, rhs), delta);
}
Node* InterpreterAssembler::Dispatch() {
return DispatchTo(Advance(Bytecodes::Size(bytecode_, operand_scale_)));
Node* InterpreterAssembler::LoadBytecode(compiler::Node* bytecode_offset) {
Node* bytecode =
Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), bytecode_offset);
if (kPointerSize == 8) {
bytecode = ChangeUint32ToUint64(bytecode);
}
return bytecode;
}
Node* InterpreterAssembler::DispatchTo(Node* new_bytecode_offset) {
Node* target_bytecode = Load(
MachineType::Uint8(), BytecodeArrayTaggedPointer(), new_bytecode_offset);
if (kPointerSize == 8) {
target_bytecode = ChangeUint32ToUint64(target_bytecode);
Node* InterpreterAssembler::StarDispatchLookahead(Node* target_bytecode) {
Label do_inline_star(this), done(this);
Variable var_bytecode(this, MachineRepresentation::kWord8);
var_bytecode.Bind(target_bytecode);
Node* star_bytecode = IntPtrConstant(static_cast<int>(Bytecode::kStar));
Node* is_star = WordEqual(target_bytecode, star_bytecode);
BranchIf(is_star, &do_inline_star, &done);
Bind(&do_inline_star);
{
InlineStar();
var_bytecode.Bind(LoadBytecode(BytecodeOffset()));
Goto(&done);
}
Bind(&done);
return var_bytecode.value();
}
void InterpreterAssembler::InlineStar() {
Bytecode previous_bytecode = bytecode_;
AccumulatorUse previous_acc_use = accumulator_use_;
bytecode_ = Bytecode::kStar;
accumulator_use_ = AccumulatorUse::kNone;
if (FLAG_trace_ignition) {
TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
}
StoreRegister(GetAccumulator(), BytecodeOperandReg(0));
DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));
Advance();
bytecode_ = previous_bytecode;
accumulator_use_ = previous_acc_use;
}
Node* InterpreterAssembler::Dispatch() {
Node* target_offset = Advance();
Node* target_bytecode = LoadBytecode(target_offset);
if (Bytecodes::IsStarLookahead(bytecode_, operand_scale_)) {
target_bytecode = StarDispatchLookahead(target_bytecode);
}
return DispatchToBytecode(target_bytecode, BytecodeOffset());
}
Node* InterpreterAssembler::DispatchToBytecode(Node* target_bytecode,
Node* new_bytecode_offset) {
if (FLAG_trace_ignition_dispatches) {
TraceBytecodeDispatch(target_bytecode);
}
......@@ -722,10 +787,6 @@ Node* InterpreterAssembler::DispatchToBytecodeHandler(Node* handler,
Node* InterpreterAssembler::DispatchToBytecodeHandlerEntry(
Node* handler_entry, Node* bytecode_offset) {
if (FLAG_trace_ignition) {
TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
}
InterpreterDispatchDescriptor descriptor(isolate());
Node* args[] = {GetAccumulatorUnchecked(), bytecode_offset,
BytecodeArrayTaggedPointer(), DispatchTableRawPointer()};
......@@ -741,11 +802,7 @@ void InterpreterAssembler::DispatchWide(OperandScale operand_scale) {
// Indices 256-511 correspond to bytecodes with operand_scale == 1
// Indices 512-767 correspond to bytecodes with operand_scale == 2
Node* next_bytecode_offset = Advance(1);
Node* next_bytecode = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
next_bytecode_offset);
if (kPointerSize == 8) {
next_bytecode = ChangeUint32ToUint64(next_bytecode);
}
Node* next_bytecode = LoadBytecode(next_bytecode_offset);
if (FLAG_trace_ignition_dispatches) {
TraceBytecodeDispatch(next_bytecode);
......
......@@ -231,13 +231,30 @@ class InterpreterAssembler : public CodeStubAssembler {
// JumpIfWordNotEqual.
void JumpConditional(compiler::Node* condition, compiler::Node* jump_offset);
// Returns BytecodeOffset() advanced by delta bytecodes. Note: this does not
// update BytecodeOffset() itself.
// Updates and returns BytecodeOffset() advanced by the current bytecode's
// size. Traces the exit of the current bytecode.
compiler::Node* Advance();
// Updates and returns BytecodeOffset() advanced by delta bytecodes.
// Traces the exit of the current bytecode.
compiler::Node* Advance(int delta);
compiler::Node* Advance(compiler::Node* delta);
// Starts next instruction dispatch at |new_bytecode_offset|.
compiler::Node* DispatchTo(compiler::Node* new_bytecode_offset);
// Load the bytecode at |bytecode_offset|.
compiler::Node* LoadBytecode(compiler::Node* bytecode_offset);
// Look ahead for Star and inline it in a branch. Returns a new target
// bytecode node for dispatch.
compiler::Node* StarDispatchLookahead(compiler::Node* target_bytecode);
// Build code for Star at the current BytecodeOffset() and Advance() to the
// next dispatch offset.
void InlineStar();
// Dispatch to |target_bytecode| at |new_bytecode_offset|.
// |target_bytecode| should be equivalent to loading from the offset.
compiler::Node* DispatchToBytecode(compiler::Node* target_bytecode,
compiler::Node* new_bytecode_offset);
// Dispatch to the bytecode handler with code offset |handler|.
compiler::Node* DispatchToBytecodeHandler(compiler::Node* handler,
......@@ -251,6 +268,7 @@ class InterpreterAssembler : public CodeStubAssembler {
Bytecode bytecode_;
OperandScale operand_scale_;
CodeStubAssembler::Variable bytecode_offset_;
CodeStubAssembler::Variable interpreted_frame_pointer_;
CodeStubAssembler::Variable accumulator_;
AccumulatorUse accumulator_use_;
......
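
To see how the helpers documented above fit together, here is the same toy model as the sketch after the commit message, reorganized along the declared helper boundaries: Advance updates the offset, LoadBytecode peeks at it, StarDispatchLookahead folds an immediately following Star into the current handler via InlineStar, and DispatchToBytecode continues at the resulting target. Everything below is hypothetical plain C++ over a toy state; the real helpers build TurboFan graph nodes via CodeStubAssembler.

// Toy mirror of the documented dispatch pipeline (hypothetical names/state).
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

enum Op : uint8_t { kLdaSmi, kStar, kReturn };

struct Toy {
  std::vector<uint8_t> code;
  std::size_t offset = 0;   // models the bytecode_offset_ variable
  int acc = 0;
  int regs[4] = {0};

  std::size_t Advance(std::size_t delta) { return offset += delta; }
  uint8_t LoadBytecode(std::size_t at) const { return code[at]; }

  void InlineStar() {        // the body of Star, built into the current handler
    regs[code[offset + 1]] = acc;
    Advance(2);              // size of Star plus its register operand
  }

  uint8_t StarDispatchLookahead(uint8_t target) {
    if (target == kStar) {   // fold the Star into this handler
      InlineStar();
      target = LoadBytecode(offset);
    }
    return target;
  }

  void DispatchToBytecode(uint8_t target, std::size_t at);

  void Dispatch(std::size_t current_size) {
    uint8_t target = LoadBytecode(Advance(current_size));
    // The real code only does this when Bytecodes::IsStarLookahead(...) holds.
    target = StarDispatchLookahead(target);
    DispatchToBytecode(target, offset);
  }
};

void Toy::DispatchToBytecode(uint8_t target, std::size_t at) {
  switch (target) {
    case kLdaSmi: acc = code[at + 1]; Dispatch(2); break;
    case kStar:   regs[code[at + 1]] = acc; Dispatch(2); break;
    case kReturn: std::printf("acc=%d\n", acc); break;
  }
}

int main() {
  Toy t;
  t.code = {kLdaSmi, 7, kStar, 0, kReturn};   // acc = 7; r0 = acc; return
  t.DispatchToBytecode(t.LoadBytecode(0), 0);
  return 0;
}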
......@@ -1662,6 +1662,7 @@ void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) {
__ CallRuntime(Runtime::kCreateObjectLiteral, context, closure,
literal_index, constant_elements, flags);
__ SetAccumulator(result);
// TODO(klaasb) build a single dispatch once the call is inlined
__ Dispatch();
}
}
......
......@@ -332,6 +332,32 @@ TARGET_TEST_F(InterpreterAssemblerTest, Dispatch) {
IsParameter(InterpreterDispatchDescriptor::kDispatchTableParameter),
IsWordShl(target_bytecode_matcher, IsIntPtrConstant(kPointerSizeLog2)));
if (interpreter::Bytecodes::IsStarLookahead(bytecode, operand_scale)) {
Matcher<Node*> after_lookahead_offset =
IsIntPtrAdd(next_bytecode_offset_matcher,
IsIntPtrConstant(interpreter::Bytecodes::Size(
Bytecode::kStar, operand_scale)));
next_bytecode_offset_matcher =
IsPhi(MachineType::PointerRepresentation(),
next_bytecode_offset_matcher, after_lookahead_offset, _);
Matcher<Node*> after_lookahead_bytecode = m.IsLoad(
MachineType::Uint8(),
IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
after_lookahead_offset);
if (kPointerSize == 8) {
after_lookahead_bytecode =
IsChangeUint32ToUint64(after_lookahead_bytecode);
}
target_bytecode_matcher =
IsPhi(MachineRepresentation::kWord8, target_bytecode_matcher,
after_lookahead_bytecode, _);
code_target_matcher = m.IsLoad(
MachineType::Pointer(),
IsParameter(InterpreterDispatchDescriptor::kDispatchTableParameter),
IsWordShl(target_bytecode_matcher,
IsIntPtrConstant(kPointerSizeLog2)));
}
EXPECT_THAT(
tail_call_node,
IsTailCall(
......@@ -351,6 +377,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, Jump) {
int jump_offsets[] = {-9710, -77, 0, +3, +97109};
TRACED_FOREACH(int, jump_offset, jump_offsets) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
if (!interpreter::Bytecodes::IsJump(bytecode)) return;
InterpreterAssemblerForTest m(this, bytecode);
Node* tail_call_node = m.Jump(m.IntPtrConstant(jump_offset));
......