Commit cb0bc43f authored by Ross McIlroy, committed by Commit Bot

[Interpreter] Refactor bytecode register access.

Refactors bytecode register access to avoid having to deal with register indexes
directly.

 - Changes Load/StoreRegister to Load/StoreRegisterAtOperandIndex
 - Adds RegisterList abstraction for dealing with lists of registers
 - Adds helpers for Loading / Storing register pairs / triples (see the sketch below).

Change-Id: I34427e4bd7314dce0230572212580d6a93ccc2d4
Reviewed-on: https://chromium-review.googlesource.com/887062
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Commit-Queue: Ross McIlroy <rmcilroy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#50899}
parent 33b39dbf
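The shape of the refactor is easiest to see in the bytecode handlers themselves (their diffs are collapsed further down). A rough before/after sketch, assuming the usual IGNITION_HANDLER macro and pre-existing InterpreterAssembler helpers (BytecodeOperandReg, SetAccumulator, Dispatch) from interpreter-generator.cc rather than the exact lines of this patch:

// Before: the handler fetches a raw register index operand and threads it through.
IGNITION_HANDLER(Ldar, InterpreterAssembler) {
  Node* reg_index = BytecodeOperandReg(0);  // raw index of register operand 0
  Node* value = LoadRegister(reg_index);    // load via the explicit index
  SetAccumulator(value);
  Dispatch();
}

// After: the handler names the operand and the assembler resolves the register.
IGNITION_HANDLER(Ldar, InterpreterAssembler) {
  Node* value = LoadRegisterAtOperandIndex(0);  // operand 0 is the source register
  SetAccumulator(value);
  Dispatch();
}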
@@ -404,6 +404,7 @@ class SloppyTNode : public TNode<T> {
V(IntPtrEqual, BoolT, WordT, WordT) \
V(Uint32LessThan, BoolT, Word32T, Word32T) \
V(Uint32LessThanOrEqual, BoolT, Word32T, Word32T) \
V(Uint32GreaterThan, BoolT, Word32T, Word32T) \
V(Uint32GreaterThanOrEqual, BoolT, Word32T, Word32T) \
V(UintPtrLessThan, BoolT, WordT, WordT) \
V(UintPtrLessThanOrEqual, BoolT, WordT, WordT) \
@@ -200,6 +200,17 @@ bool Bytecodes::IsRegisterOperandType(OperandType operand_type) {
return false;
}
// static
bool Bytecodes::IsRegisterListOperandType(OperandType operand_type) {
switch (operand_type) {
case OperandType::kRegList:
case OperandType::kRegOutList:
return true;
default:
return false;
}
}
bool Bytecodes::MakesCallAlongCriticalPath(Bytecode bytecode) {
if (IsCallOrConstruct(bytecode) || IsCallRuntime(bytecode)) return true;
switch (bytecode) {
@@ -820,6 +820,9 @@ class V8_EXPORT_PRIVATE Bytecodes final : public AllStatic {
// Returns true if |operand_type| represents a register used as an output.
static bool IsRegisterOutputOperandType(OperandType operand_type);
// Returns true if |operand_type| represents a register list operand.
static bool IsRegisterListOperandType(OperandType operand_type);
// Returns true if the handler for |bytecode| should look ahead and inline a
// dispatch to a Star bytecode.
static bool IsStarLookahead(Bytecode bytecode, OperandScale operand_scale);
This diff is collapsed.
@@ -86,31 +86,58 @@ class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
void GotoIfHasContextExtensionUpToDepth(compiler::Node* context,
compiler::Node* depth, Label* target);
// A RegListNodePair provides an abstraction over lists of registers.
class RegListNodePair {
public:
RegListNodePair(Node* base_reg_location, Node* reg_count)
: base_reg_location_(base_reg_location), reg_count_(reg_count) {}
compiler::Node* reg_count() const { return reg_count_; }
compiler::Node* base_reg_location() const { return base_reg_location_; }
private:
compiler::Node* base_reg_location_;
compiler::Node* reg_count_;
};
// Backup/restore register file to/from a fixed array of the correct length.
compiler::Node* ExportRegisterFile(compiler::Node* array,
compiler::Node* register_count);
const RegListNodePair& registers);
compiler::Node* ImportRegisterFile(compiler::Node* array,
compiler::Node* register_count);
const RegListNodePair& registers);
// Loads from and stores to the interpreter register file.
compiler::Node* LoadRegister(Register reg);
compiler::Node* LoadRegister(compiler::Node* reg_index);
compiler::Node* LoadAndUntagRegister(Register reg);
compiler::Node* StoreRegister(compiler::Node* value, Register reg);
compiler::Node* StoreRegister(compiler::Node* value,
compiler::Node* reg_index);
compiler::Node* StoreAndTagRegister(compiler::Node* value, Register reg);
// Returns the next consecutive register.
compiler::Node* NextRegister(compiler::Node* reg_index);
// Returns the location in memory of the register |reg_index| in the
// interpreter register file.
compiler::Node* RegisterLocation(compiler::Node* reg_index);
compiler::Node* LoadRegisterAtOperandIndex(int operand_index);
std::pair<compiler::Node*, compiler::Node*> LoadRegisterPairAtOperandIndex(
int operand_index);
void StoreRegister(compiler::Node* value, Register reg);
void StoreAndTagRegister(compiler::Node* value, Register reg);
void StoreRegisterAtOperandIndex(compiler::Node* value, int operand_index);
void StoreRegisterPairAtOperandIndex(compiler::Node* value1,
compiler::Node* value2,
int operand_index);
void StoreRegisterTripleAtOperandIndex(compiler::Node* value1,
compiler::Node* value2,
compiler::Node* value3,
int operand_index);
RegListNodePair GetRegisterListAtOperandIndex(int operand_index);
Node* LoadRegisterFromRegisterList(const RegListNodePair& reg_list,
int index);
Node* RegisterLocationInRegisterList(const RegListNodePair& reg_list,
int index);
// Load constant at the index specified in operand |operand_index| from the
// constant pool.
compiler::Node* LoadConstantPoolEntryAtOperandIndex(int operand_index);
// Load and untag constant at the index specified in operand |operand_index|
// from the constant pool.
compiler::Node* LoadAndUntagConstantPoolEntryAtOperandIndex(
int operand_index);
// Load constant at |index| in the constant pool.
compiler::Node* LoadConstantPoolEntry(compiler::Node* index);
// Load and untag constant at |index| in the constant pool.
compiler::Node* LoadAndUntagConstantPoolEntry(compiler::Node* index);
@@ -135,12 +162,11 @@ class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
compiler::Node* feedback_vector,
compiler::Node* slot_id);
// Call JSFunction or Callable |function| with |arg_count| arguments (not
// including receiver) and the first argument located at |first_arg|, possibly
// Call JSFunction or Callable |function| with |args| arguments, possibly
// including the receiver depending on |receiver_mode|. After the call returns
// directly dispatches to the next bytecode.
void CallJSAndDispatch(compiler::Node* function, compiler::Node* context,
compiler::Node* first_arg, compiler::Node* arg_count,
const RegListNodePair& args,
ConvertReceiverMode receiver_mode);
// Call JSFunction or Callable |function| with |arg_count| arguments (not
@@ -151,46 +177,41 @@ class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
void CallJSAndDispatch(Node* function, Node* context, Node* arg_count,
ConvertReceiverMode receiver_mode, TArgs... args);
// Call JSFunction or Callable |function| with |arg_count|
// arguments (not including receiver) and the first argument
// located at |first_arg|, and the final argument being spread. After the call
// returns directly dispatches to the next bytecode.
// Call JSFunction or Callable |function| with |args|
// arguments (not including receiver), and the final argument being spread.
// After the call returns directly dispatches to the next bytecode.
void CallJSWithSpreadAndDispatch(compiler::Node* function,
compiler::Node* context,
compiler::Node* first_arg,
compiler::Node* arg_count,
const RegListNodePair& args,
compiler::Node* slot_id,
compiler::Node* feedback_vector);
// Call constructor |target| with |arg_count| arguments (not
// including receiver) and the first argument located at
// |first_arg|. The |new_target| is the same as the
// |target| for the new keyword, but differs for the super
// keyword.
// Call constructor |target| with |args| arguments (not including receiver).
// The |new_target| is the same as the |target| for the new keyword, but
// differs for the super keyword.
compiler::Node* Construct(compiler::Node* target, compiler::Node* context,
compiler::Node* new_target,
compiler::Node* first_arg,
compiler::Node* arg_count, compiler::Node* slot_id,
const RegListNodePair& args,
compiler::Node* slot_id,
compiler::Node* feedback_vector);
// Call constructor |target| with |arg_count| arguments (not including
// receiver) and the first argument located at |first_arg|. The last argument
// is always a spread. The |new_target| is the same as the |target| for
// the new keyword, but differs for the super keyword.
// Call constructor |target| with |args| arguments (not including
// receiver). The last argument is always a spread. The |new_target| is the
// same as the |target| for the new keyword, but differs for the super
// keyword.
compiler::Node* ConstructWithSpread(compiler::Node* target,
compiler::Node* context,
compiler::Node* new_target,
compiler::Node* first_arg,
compiler::Node* arg_count,
const RegListNodePair& args,
compiler::Node* slot_id,
compiler::Node* feedback_vector);
// Call runtime function with |arg_count| arguments and the first argument
// located at |first_arg|.
// Call runtime function with |args| arguments which will return |return_size|
// number of values.
compiler::Node* CallRuntimeN(compiler::Node* function_id,
compiler::Node* context,
compiler::Node* first_arg,
compiler::Node* arg_count, int return_size = 1);
const RegListNodePair& args,
int return_size = 1);
// Jump forward relative to the current bytecode by the |jump_offset|.
compiler::Node* Jump(compiler::Node* jump_offset);
@@ -264,6 +285,13 @@ class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
// interpreted.
compiler::Node* GetInterpretedFramePointer();
// Operations on registers.
compiler::Node* RegisterLocation(Register reg);
compiler::Node* RegisterLocation(compiler::Node* reg_index);
compiler::Node* NextRegister(compiler::Node* reg_index);
compiler::Node* LoadRegister(Node* reg_index);
void StoreRegister(compiler::Node* value, compiler::Node* reg_index);
// Saves and restores interpreter bytecode offset to the interpreter stack
// frame when performing a call.
void CallPrologue();
This diff is collapsed.
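The RegListNodePair declared in the header above replaces the separate first_arg/arg_count nodes that call-like handlers used to pass around. A minimal sketch of how a variadic handler now forwards its register list, again assuming the handler shape from the collapsed handler diffs (BytecodeOperandRuntimeId, GetContext, SetAccumulator and Dispatch are pre-existing InterpreterAssembler helpers, not part of this patch):

// CallRuntime <function_id> <first_arg> <arg_count>
IGNITION_HANDLER(CallRuntime, InterpreterAssembler) {
  Node* function_id = BytecodeOperandRuntimeId(0);
  // Operand 1 is a register list: a (base register location, register count) pair.
  RegListNodePair args = GetRegisterListAtOperandIndex(1);
  Node* context = GetContext();
  Node* result = CallRuntimeN(function_id, context, args);
  SetAccumulator(result);
  Dispatch();
}

CallRuntimeN then unpacks args.base_reg_location() and args.reg_count() when it builds the actual call, which is what the updated CallRuntime unittest below checks against.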
@@ -5,6 +5,8 @@
#ifndef V8_INTERPRETER_INTERPRETER_INTRINSICS_GENERATOR_H_
#define V8_INTERPRETER_INTERPRETER_INTRINSICS_GENERATOR_H_
#include "src/interpreter/interpreter-assembler.h"
namespace v8 {
namespace internal {
@@ -14,13 +16,9 @@ class Node;
namespace interpreter {
class InterpreterAssembler;
extern compiler::Node* GenerateInvokeIntrinsic(InterpreterAssembler* assembler,
compiler::Node* function_id,
compiler::Node* context,
compiler::Node* first_arg_reg,
compiler::Node* arg_count);
extern compiler::Node* GenerateInvokeIntrinsic(
InterpreterAssembler* assembler, compiler::Node* function_id,
compiler::Node* context, const InterpreterAssembler::RegListNodePair& args);
} // namespace interpreter
} // namespace internal
@@ -385,51 +385,6 @@ TARGET_TEST_F(InterpreterAssemblerTest, GetContext) {
}
}
TARGET_TEST_F(InterpreterAssemblerTest, RegisterLocation) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
InterpreterAssemblerTestState state(this, bytecode);
InterpreterAssemblerForTest m(&state, bytecode);
Node* reg_index_node = m.Parameter(0);
Node* reg_location_node = m.RegisterLocation(reg_index_node);
EXPECT_THAT(
reg_location_node,
c::IsIntPtrAdd(c::IsLoadParentFramePointer(),
c::IsWordShl(reg_index_node,
c::IsIntPtrConstant(kPointerSizeLog2))));
}
}
TARGET_TEST_F(InterpreterAssemblerTest, LoadRegister) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
InterpreterAssemblerTestState state(this, bytecode);
InterpreterAssemblerForTest m(&state, bytecode);
Node* reg_index_node = m.Parameter(0);
Node* load_reg_node = m.LoadRegister(reg_index_node);
EXPECT_THAT(
load_reg_node,
m.IsLoad(MachineType::AnyTagged(), c::IsLoadParentFramePointer(),
c::IsWordShl(reg_index_node,
c::IsIntPtrConstant(kPointerSizeLog2))));
}
}
TARGET_TEST_F(InterpreterAssemblerTest, StoreRegister) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
InterpreterAssemblerTestState state(this, bytecode);
InterpreterAssemblerForTest m(&state, bytecode);
Node* store_value = m.Int32Constant(0xDEADBEEF);
Node* reg_index_node = m.Parameter(0);
Node* store_reg_node = m.StoreRegister(store_value, reg_index_node);
EXPECT_THAT(store_reg_node,
m.IsStore(c::StoreRepresentation(MachineRepresentation::kTagged,
kNoWriteBarrier),
c::IsLoadParentFramePointer(),
c::IsWordShl(reg_index_node,
c::IsIntPtrConstant(kPointerSizeLog2)),
store_value));
}
}
TARGET_TEST_F(InterpreterAssemblerTest, LoadConstantPoolEntry) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
InterpreterAssemblerTestState state(this, bytecode);
@@ -504,8 +459,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, CallRuntime) {
CodeFactory::InterpreterCEntry(isolate(), result_size);
Node* function_id = m.Int32Constant(0);
Node* first_arg = m.IntPtrConstant(1);
Node* arg_count = m.Int32Constant(2);
InterpreterAssembler::RegListNodePair registers(m.IntPtrConstant(1),
m.Int32Constant(2));
Node* context = m.IntPtrConstant(4);
Matcher<Node*> function_table = c::IsExternalConstant(
@@ -518,11 +473,13 @@ TARGET_TEST_F(InterpreterAssemblerTest, CallRuntime) {
m.IsLoad(MachineType::Pointer(), function,
c::IsIntPtrConstant(offsetof(Runtime::Function, entry)));
Node* call_runtime = m.CallRuntimeN(function_id, context, first_arg,
arg_count, result_size);
EXPECT_THAT(call_runtime,
c::IsCall(_, c::IsHeapConstant(builtin.code()), arg_count,
first_arg, function_entry, context, _, _));
Node* call_runtime =
m.CallRuntimeN(function_id, context, registers, result_size);
EXPECT_THAT(
call_runtime,
c::IsCall(_, c::IsHeapConstant(builtin.code()),
registers.reg_count(), registers.base_reg_location(),
function_entry, context, _, _));
}
}
}