Commit 02b7373a authored by oth, committed by Commit bot

[interpreter] Introduce bytecode generation pipeline.

This change introduces a pipeline for the final stages of
bytecode generation.

The peephole optimizer is made distinct from the BytecodeArrayBuilder.

A new BytecodeArrayWriter is responsible for writing bytecode. It
also keeps track of the maximum register index seen, which lets it
report a potentially smaller frame size.
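
For illustration only, a minimal sketch of how the new stages are
intended to chain together. The wrapper function and its parameters
are assumptions; the class names, constructors, and the
--ignition_peephole flag come from the files in this change.

  // Illustrative sketch (not part of this change): wiring the pipeline.
  void ExamplePipelineWiring(Zone* zone, ConstantArrayBuilder* constants,
                             SourcePositionTableBuilder* positions) {
    // Final stage: emits bytes and tracks the maximum register index used.
    BytecodeArrayWriter writer(zone, positions);
    // Optional stage: buffers at most one bytecode and elides redundant ones.
    BytecodePeepholeOptimizer optimizer(constants, &writer);
    BytecodePipelineStage* pipeline =
        FLAG_ignition_peephole
            ? static_cast<BytecodePipelineStage*>(&optimizer)
            : &writer;

    BytecodeNode node(Bytecode::kLdaZero);
    pipeline->Write(&node);       // May be buffered by the peephole stage.
    pipeline->FlushBasicBlock();  // Flushes any buffered bytecode at block end.
  }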

R=rmcilroy@chromium.org
LOG=N
BUG=v8:4280

Review-Url: https://codereview.chromium.org/1947403002
Cr-Commit-Position: refs/heads/master@{#36220}
parent fa7460ad
......@@ -1235,8 +1235,14 @@ v8_source_set("v8_base") {
"src/interpreter/bytecode-array-builder.h",
"src/interpreter/bytecode-array-iterator.cc",
"src/interpreter/bytecode-array-iterator.h",
"src/interpreter/bytecode-array-writer.cc",
"src/interpreter/bytecode-array-writer.h",
"src/interpreter/bytecode-generator.cc",
"src/interpreter/bytecode-generator.h",
"src/interpreter/bytecode-peephole-optimizer.cc",
"src/interpreter/bytecode-peephole-optimizer.h",
"src/interpreter/bytecode-pipeline.cc",
"src/interpreter/bytecode-pipeline.h",
"src/interpreter/bytecode-register-allocator.cc",
"src/interpreter/bytecode-register-allocator.h",
"src/interpreter/bytecode-traits.h",
......
......@@ -1423,6 +1423,8 @@ void BytecodeGraphBuilder::VisitIllegal() {
jsgraph()->Constant(kIllegalBytecode));
}
void BytecodeGraphBuilder::VisitNop() {}
void BytecodeGraphBuilder::SwitchToMergeEnvironment(int current_offset) {
if (merge_environments_[current_offset] != nullptr) {
if (environment() != nullptr) {
......
......@@ -304,6 +304,7 @@ DEFINE_BOOL(ignition_eager, true, "eagerly compile and parse with ignition")
DEFINE_BOOL(ignition_generators, false,
"enable experimental ignition support for generators")
DEFINE_STRING(ignition_filter, "*", "filter for ignition interpreter")
DEFINE_BOOL(ignition_peephole, true, "use ignition peephole optimizer")
DEFINE_BOOL(print_bytecode, false,
"print bytecode generated by ignition interpreter")
DEFINE_BOOL(trace_ignition, false,
......
......@@ -6,6 +6,7 @@
#define V8_INTERPRETER_BYTECODE_ARRAY_BUILDER_H_
#include "src/ast/ast.h"
#include "src/interpreter/bytecode-array-writer.h"
#include "src/interpreter/bytecode-register-allocator.h"
#include "src/interpreter/bytecodes.h"
#include "src/interpreter/constant-array-builder.h"
......@@ -21,6 +22,8 @@ class Isolate;
namespace interpreter {
class BytecodeLabel;
class BytecodeNode;
class BytecodePipelineStage;
class Register;
class BytecodeArrayBuilder final : public ZoneObject {
......@@ -262,26 +265,16 @@ class BytecodeArrayBuilder final : public ZoneObject {
void SetExpressionAsStatementPosition(Expression* expr);
// Accessors
Zone* zone() const { return zone_; }
TemporaryRegisterAllocator* temporary_register_allocator() {
return &temporary_allocator_;
}
const TemporaryRegisterAllocator* temporary_register_allocator() const {
return &temporary_allocator_;
}
Zone* zone() const { return zone_; }
void EnsureReturn();
static OperandScale OperandSizesToScale(
OperandSize size0, OperandSize size1 = OperandSize::kByte,
OperandSize size2 = OperandSize::kByte,
OperandSize size3 = OperandSize::kByte);
static OperandSize SizeForRegisterOperand(Register reg);
static OperandSize SizeForSignedOperand(int value);
static OperandSize SizeForUnsignedOperand(int value);
static OperandSize SizeForUnsignedOperand(size_t value);
static uint32_t RegisterOperand(Register reg);
static Register RegisterFromOperand(uint32_t operand);
static uint32_t SignedOperand(int value, OperandSize size);
......@@ -289,7 +282,6 @@ class BytecodeArrayBuilder final : public ZoneObject {
static uint32_t UnsignedOperand(size_t value);
private:
class PreviousBytecodeHelper;
friend class BytecodeRegisterAllocator;
static Bytecode BytecodeForBinaryOperation(Token::Value op);
......@@ -305,11 +297,7 @@ class BytecodeArrayBuilder final : public ZoneObject {
static Bytecode BytecodeForCall(TailCallMode tail_call_mode);
static Bytecode GetJumpWithConstantOperand(Bytecode jump_smi8_operand);
static Bytecode GetJumpWithToBoolean(Bytecode jump_smi8_operand);
template <size_t N>
INLINE(void Output(Bytecode bytecode, uint32_t (&operands)[N],
OperandScale operand_scale = OperandScale::kSingle));
void Output(Bytecode bytecode);
void OutputScaled(Bytecode bytecode, OperandScale operand_scale,
uint32_t operand0, uint32_t operand1, uint32_t operand2,
......@@ -323,14 +311,13 @@ class BytecodeArrayBuilder final : public ZoneObject {
BytecodeArrayBuilder& OutputJump(Bytecode jump_bytecode,
BytecodeLabel* label);
void PatchJump(const ZoneVector<uint8_t>::iterator& jump_target,
const ZoneVector<uint8_t>::iterator& jump_location);
void PatchIndirectJumpWith8BitOperand(
const ZoneVector<uint8_t>::iterator& jump_location, int delta);
void PatchIndirectJumpWith16BitOperand(
const ZoneVector<uint8_t>::iterator& jump_location, int delta);
void PatchIndirectJumpWith32BitOperand(
const ZoneVector<uint8_t>::iterator& jump_location, int delta);
void PatchJump(size_t jump_target, size_t jump_location);
void PatchJumpWith8BitOperand(ZoneVector<uint8_t>* bytecodes,
size_t jump_location, int delta);
void PatchJumpWith16BitOperand(ZoneVector<uint8_t>* bytecodes,
size_t jump_location, int delta);
void PatchJumpWith32BitOperand(ZoneVector<uint8_t>* bytecodes,
size_t jump_location, int delta);
void LeaveBasicBlock();
......@@ -338,9 +325,8 @@ class BytecodeArrayBuilder final : public ZoneObject {
int operand_index, uint32_t operand_value) const;
bool RegisterIsValid(Register reg, OperandSize reg_size) const;
bool LastBytecodeInSameBlock() const;
bool NeedToBooleanCast();
bool IsRegisterInAccumulator(Register reg);
// Attach latest source position to |node|.
void AttachSourceInfo(BytecodeNode* node);
// Set position for return.
void SetReturnPosition();
......@@ -348,9 +334,11 @@ class BytecodeArrayBuilder final : public ZoneObject {
// Gets a constant pool entry for the |object|.
size_t GetConstantPoolEntry(Handle<Object> object);
ZoneVector<uint8_t>* bytecodes() { return &bytecodes_; }
const ZoneVector<uint8_t>* bytecodes() const { return &bytecodes_; }
Isolate* isolate() const { return isolate_; }
BytecodeArrayWriter* bytecode_array_writer() {
return &bytecode_array_writer_;
}
BytecodePipelineStage* pipeline() { return pipeline_; }
ConstantArrayBuilder* constant_array_builder() {
return &constant_array_builder_;
}
......@@ -366,13 +354,10 @@ class BytecodeArrayBuilder final : public ZoneObject {
Isolate* isolate_;
Zone* zone_;
ZoneVector<uint8_t> bytecodes_;
bool bytecode_generated_;
ConstantArrayBuilder constant_array_builder_;
HandlerTableBuilder handler_table_builder_;
SourcePositionTableBuilder source_position_table_builder_;
size_t last_block_end_;
size_t last_bytecode_start_;
bool exit_seen_in_block_;
int unbound_jumps_;
int parameter_count_;
......@@ -380,6 +365,9 @@ class BytecodeArrayBuilder final : public ZoneObject {
int context_register_count_;
int return_position_;
TemporaryRegisterAllocator temporary_allocator_;
BytecodeArrayWriter bytecode_array_writer_;
BytecodePipelineStage* pipeline_;
BytecodeSourceInfo latest_source_info_;
DISALLOW_COPY_AND_ASSIGN(BytecodeArrayBuilder);
};
......
......@@ -128,26 +128,15 @@ Register BytecodeArrayIterator::GetRegisterOperand(int operand_index) const {
}
int BytecodeArrayIterator::GetRegisterOperandRange(int operand_index) const {
interpreter::OperandType operand_type =
Bytecodes::GetOperandType(current_bytecode(), operand_index);
DCHECK(Bytecodes::IsRegisterOperandType(operand_type));
switch (operand_type) {
case OperandType::kRegPair:
case OperandType::kRegOutPair:
return 2;
case OperandType::kRegOutTriple:
return 3;
default: {
if (operand_index + 1 !=
Bytecodes::NumberOfOperands(current_bytecode())) {
OperandType next_operand_type =
Bytecodes::GetOperandType(current_bytecode(), operand_index + 1);
if (OperandType::kRegCount == next_operand_type) {
return GetRegisterCountOperand(operand_index + 1);
}
}
return 1;
}
DCHECK_LE(operand_index, Bytecodes::NumberOfOperands(current_bytecode()));
const OperandType* operand_types =
Bytecodes::GetOperandTypes(current_bytecode());
DCHECK(Bytecodes::IsRegisterOperandType(operand_types[operand_index]));
if (operand_types[operand_index + 1] == OperandType::kRegCount) {
return GetRegisterCountOperand(operand_index + 1);
} else {
OperandType operand_type = operand_types[operand_index];
return Bytecodes::GetNumberOfRegistersRepresentedBy(operand_type);
}
}
......
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/interpreter/bytecode-array-writer.h"
#include <iomanip>
#include "src/interpreter/source-position-table.h"
namespace v8 {
namespace internal {
namespace interpreter {
BytecodeArrayWriter::BytecodeArrayWriter(
Zone* zone, SourcePositionTableBuilder* source_position_table_builder)
: bytecodes_(zone),
max_register_count_(0),
source_position_table_builder_(source_position_table_builder) {}
// override
BytecodeArrayWriter::~BytecodeArrayWriter() {}
// override
size_t BytecodeArrayWriter::FlushForOffset() { return bytecodes()->size(); }
// override
void BytecodeArrayWriter::Write(BytecodeNode* node) {
UpdateSourcePositionTable(node);
EmitBytecode(node);
}
void BytecodeArrayWriter::UpdateSourcePositionTable(
const BytecodeNode* const node) {
int bytecode_offset = static_cast<int>(bytecodes()->size());
const BytecodeSourceInfo& source_info = node->source_info();
if (source_info.is_valid()) {
source_position_table_builder_->AddPosition(bytecode_offset,
source_info.source_position(),
source_info.is_statement());
}
}
void BytecodeArrayWriter::EmitBytecode(const BytecodeNode* const node) {
OperandScale operand_scale = node->operand_scale();
if (operand_scale != OperandScale::kSingle) {
Bytecode prefix = Bytecodes::OperandScaleToPrefixBytecode(operand_scale);
bytecodes()->push_back(Bytecodes::ToByte(prefix));
}
Bytecode bytecode = node->bytecode();
bytecodes()->push_back(Bytecodes::ToByte(bytecode));
int register_operand_bitmap = Bytecodes::GetRegisterOperandBitmap(bytecode);
const uint32_t* const operands = node->operands();
const OperandType* operand_types = Bytecodes::GetOperandTypes(bytecode);
for (int i = 0; operand_types[i] != OperandType::kNone; ++i) {
OperandType operand_type = operand_types[i];
switch (Bytecodes::SizeOfOperand(operand_type, operand_scale)) {
case OperandSize::kNone:
UNREACHABLE();
break;
case OperandSize::kByte:
bytecodes()->push_back(static_cast<uint8_t>(operands[i]));
break;
case OperandSize::kShort: {
uint8_t operand_bytes[2];
WriteUnalignedUInt16(operand_bytes, operands[i]);
bytecodes()->insert(bytecodes()->end(), operand_bytes,
operand_bytes + 2);
break;
}
case OperandSize::kQuad: {
uint8_t operand_bytes[4];
WriteUnalignedUInt32(operand_bytes, operands[i]);
bytecodes()->insert(bytecodes()->end(), operand_bytes,
operand_bytes + 4);
break;
}
}
if ((register_operand_bitmap >> i) & 1) {
int count;
if (operand_types[i + 1] == OperandType::kRegCount) {
count = static_cast<int>(operands[i + 1]);
} else {
count = Bytecodes::GetNumberOfRegistersRepresentedBy(operand_type);
}
Register reg = Register::FromOperand(static_cast<int32_t>(operands[i]));
max_register_count_ = std::max(max_register_count_, reg.index() + count);
}
}
}
// override
void BytecodeArrayWriter::FlushBasicBlock() {}
int BytecodeArrayWriter::GetMaximumFrameSizeUsed() {
return max_register_count_ * kPointerSize;
}
} // namespace interpreter
} // namespace internal
} // namespace v8
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_INTERPRETER_BYTECODE_ARRAY_WRITER_H_
#define V8_INTERPRETER_BYTECODE_ARRAY_WRITER_H_
#include "src/interpreter/bytecode-pipeline.h"
namespace v8 {
namespace internal {
namespace interpreter {
class SourcePositionTableBuilder;
// Class for emitting bytecode as the final stage of the bytecode
// generation pipeline.
class BytecodeArrayWriter final : public BytecodePipelineStage {
public:
BytecodeArrayWriter(
Zone* zone, SourcePositionTableBuilder* source_position_table_builder);
virtual ~BytecodeArrayWriter();
void Write(BytecodeNode* node) override;
size_t FlushForOffset() override;
void FlushBasicBlock() override;
// Get the bytecode vector.
ZoneVector<uint8_t>* bytecodes() { return &bytecodes_; }
// Returns the size in bytes of the frame associated with the
// bytecode written.
int GetMaximumFrameSizeUsed();
private:
void EmitBytecode(const BytecodeNode* const node);
void UpdateSourcePositionTable(const BytecodeNode* const node);
ZoneVector<uint8_t> bytecodes_;
int max_register_count_;
SourcePositionTableBuilder* source_position_table_builder_;
DISALLOW_COPY_AND_ASSIGN(BytecodeArrayWriter);
};
} // namespace interpreter
} // namespace internal
} // namespace v8
#endif // V8_INTERPRETER_BYTECODE_ARRAY_WRITER_H_
......@@ -655,6 +655,9 @@ void BytecodeGenerator::BuildIndexedJump(Register index, size_t start_index,
.CompareOperation(Token::Value::EQ_STRICT, index)
.JumpIfTrue(&(targets[i]));
}
// TODO(oth): This should be an abort via the runtime with a
// corresponding message. An illegal bytecode should never be
// emitted in valid bytecode.
builder()->Illegal(); // Should never get here.
}
......
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/interpreter/bytecode-peephole-optimizer.h"
#include "src/interpreter/constant-array-builder.h"
#include "src/objects-inl.h"
#include "src/objects.h"
namespace v8 {
namespace internal {
namespace interpreter {
BytecodePeepholeOptimizer::BytecodePeepholeOptimizer(
ConstantArrayBuilder* constant_array_builder,
BytecodePipelineStage* next_stage)
: constant_array_builder_(constant_array_builder),
next_stage_(next_stage),
last_(Bytecode::kNop),
last_is_valid_(false),
last_is_discardable_(false) {
// TODO(oth): Remove last_is_valid_ and use kIllegal for last_ when
// not valid. Currently blocked on the bytecode generator emitting
// kIllegal for entries not found in the jump table.
}
void BytecodePeepholeOptimizer::InvalidateLast() { last_is_valid_ = false; }
bool BytecodePeepholeOptimizer::LastIsValid() const { return last_is_valid_; }
void BytecodePeepholeOptimizer::SetLast(const BytecodeNode* const node) {
last_.Clone(node);
last_is_valid_ = true;
last_is_discardable_ = true;
}
// override
size_t BytecodePeepholeOptimizer::FlushForOffset() {
size_t buffered_size = next_stage_->FlushForOffset();
if (LastIsValid()) {
if (last_.bytecode() == Bytecode::kNop &&
!last_.source_info().is_statement()) {
// The Nop can be dropped as it doesn't have a statement
// position for the debugger and doesn't have any effects by
// definition.
InvalidateLast();
} else {
buffered_size += last_.Size();
last_is_discardable_ = false;
}
}
return buffered_size;
}
// override
void BytecodePeepholeOptimizer::FlushBasicBlock() {
if (LastIsValid()) {
next_stage_->Write(&last_);
InvalidateLast();
}
next_stage_->FlushBasicBlock();
}
// override
void BytecodePeepholeOptimizer::Write(BytecodeNode* node) {
// Attempt optimization if there is an earlier node to optimize with.
if (LastIsValid()) {
node = Optimize(node);
// Only output the last node if it wasn't invalidated by the optimization.
if (LastIsValid()) {
next_stage_->Write(&last_);
InvalidateLast();
}
}
if (node != nullptr) {
SetLast(node);
}
}
Handle<Object> BytecodePeepholeOptimizer::GetConstantForIndexOperand(
const BytecodeNode* const node, int index) const {
DCHECK_LE(index, node->operand_count());
DCHECK_EQ(Bytecodes::GetOperandType(node->bytecode(), 0), OperandType::kIdx);
uint32_t index_operand = node->operand(0);
return constant_array_builder_->At(index_operand);
}
bool BytecodePeepholeOptimizer::LastBytecodePutsNameInAccumulator() const {
DCHECK(LastIsValid());
return (last_.bytecode() == Bytecode::kTypeOf ||
last_.bytecode() == Bytecode::kToName ||
(last_.bytecode() == Bytecode::kLdaConstant &&
GetConstantForIndexOperand(&last_, 0)->IsName()));
}
void BytecodePeepholeOptimizer::UpdateCurrentBytecode(BytecodeNode* current) {
// Conditional jumps with boolean conditions are emitted in
// ToBoolean form by the bytecode array builder,
// i.e. JumpIfToBooleanTrue rather than JumpIfTrue. The ToBoolean element
// can be removed if the previous bytecode put a boolean value in
// the accumulator.
if (Bytecodes::IsJumpIfToBoolean(current->bytecode()) &&
Bytecodes::WritesBooleanToAccumulator(last_.bytecode())) {
Bytecode jump = Bytecodes::GetJumpWithoutToBoolean(current->bytecode());
current->set_bytecode(jump, current->operand(0), current->operand_scale());
}
}
bool BytecodePeepholeOptimizer::CanElideCurrent(
const BytecodeNode* const current) const {
if (Bytecodes::IsLdarOrStar(last_.bytecode()) &&
Bytecodes::IsLdarOrStar(current->bytecode()) &&
current->operand(0) == last_.operand(0)) {
// Ldar and Star make the accumulator and register hold equivalent
// values. Only the first bytecode is needed if there's a sequence
// of back-to-back Ldar and Star bytecodes with the same operand.
return true;
} else if (current->bytecode() == Bytecode::kToName &&
LastBytecodePutsNameInAccumulator()) {
// If the previous bytecode ensured a name was in the accumulator,
// the type coercion ToName() can be elided.
return true;
} else {
// Additional candidates for eliding current:
// (i) ToNumber if the last puts a number in the accumulator.
return false;
}
}
bool BytecodePeepholeOptimizer::CanElideLast(
const BytecodeNode* const current) const {
if (!last_is_discardable_) {
return false;
}
if (last_.bytecode() == Bytecode::kNop) {
// Nops are placeholders for holding source position information
// and can be elided.
return true;
} else if (Bytecodes::IsAccumulatorLoadWithoutEffects(current->bytecode()) &&
Bytecodes::IsAccumulatorLoadWithoutEffects(last_.bytecode())) {
// The accumulator is invisible to the debugger. If there is a sequence of
// consecutive accumulator loads (that don't have side effects) then only
// the final load is potentially visible.
return true;
} else {
return false;
}
}
BytecodeNode* BytecodePeepholeOptimizer::Optimize(BytecodeNode* current) {
UpdateCurrentBytecode(current);
if (CanElideCurrent(current)) {
if (current->source_info().is_valid()) {
current->set_bytecode(Bytecode::kNop);
} else {
current = nullptr;
}
} else if (CanElideLast(current)) {
if (last_.source_info().is_valid()) {
current->source_info().Update(last_.source_info());
}
InvalidateLast();
}
return current;
}
} // namespace interpreter
} // namespace internal
} // namespace v8
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_INTERPRETER_BYTECODE_PEEPHOLE_OPTIMIZER_H_
#define V8_INTERPRETER_BYTECODE_PEEPHOLE_OPTIMIZER_H_
#include "src/interpreter/bytecode-pipeline.h"
namespace v8 {
namespace internal {
namespace interpreter {
class ConstantArrayBuilder;
// An optimization stage for performing peephole optimizations on
// generated bytecode. The optimizer may buffer one bytecode
// internally.
class BytecodePeepholeOptimizer final : public BytecodePipelineStage,
public ZoneObject {
public:
BytecodePeepholeOptimizer(ConstantArrayBuilder* constant_array_builder,
BytecodePipelineStage* next_stage);
void Write(BytecodeNode* node) override;
size_t FlushForOffset() override;
void FlushBasicBlock() override;
private:
BytecodeNode* Optimize(BytecodeNode* current);
void UpdateCurrentBytecode(BytecodeNode* const current);
bool CanElideCurrent(const BytecodeNode* const current) const;
bool CanElideLast(const BytecodeNode* const current) const;
void InvalidateLast();
bool LastIsValid() const;
void SetLast(const BytecodeNode* const node);
bool LastBytecodePutsNameInAccumulator() const;
Handle<Object> GetConstantForIndexOperand(const BytecodeNode* const node,
int index) const;
ConstantArrayBuilder* constant_array_builder_;
BytecodePipelineStage* next_stage_;
BytecodeNode last_;
bool last_is_valid_;
bool last_is_discardable_;
DISALLOW_COPY_AND_ASSIGN(BytecodePeepholeOptimizer);
};
} // namespace interpreter
} // namespace internal
} // namespace v8
#endif // V8_INTERPRETER_BYTECODE_PEEPHOLE_OPTIMIZER_H_
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/interpreter/bytecode-pipeline.h"
#include <iomanip>
#include "src/interpreter/source-position-table.h"
namespace v8 {
namespace internal {
namespace interpreter {
void BytecodeSourceInfo::Update(const BytecodeSourceInfo& entry) {
DCHECK(entry.is_valid());
if (!is_valid() || (entry.is_statement() && !is_statement()) ||
(entry.is_statement() && is_statement() &&
entry.source_position() > source_position())) {
// Position is updated if any of the following conditions are met:
// (1) there is no existing position.
// (2) the incoming position is a statement and the current position
// is an expression.
// (3) the existing position is a statement and the incoming
// statement has a later source position.
// Condition 3 is needed for the first statement in a function which
// may end up with later statement positions being added during bytecode
// generation.
source_position_ = entry.source_position_;
is_statement_ = entry.is_statement_;
}
}
BytecodeNode::BytecodeNode(Bytecode bytecode) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 0);
bytecode_ = bytecode;
operand_scale_ = OperandScale::kSingle;
}
BytecodeNode::BytecodeNode(Bytecode bytecode, uint32_t operand0,
OperandScale operand_scale) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 1);
bytecode_ = bytecode;
operands_[0] = operand0;
operand_scale_ = operand_scale;
}
BytecodeNode::BytecodeNode(Bytecode bytecode, uint32_t operand0,
uint32_t operand1, OperandScale operand_scale) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 2);
bytecode_ = bytecode;
operands_[0] = operand0;
operands_[1] = operand1;
operand_scale_ = operand_scale;
}
BytecodeNode::BytecodeNode(Bytecode bytecode, uint32_t operand0,
uint32_t operand1, uint32_t operand2,
OperandScale operand_scale) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 3);
bytecode_ = bytecode;
operands_[0] = operand0;
operands_[1] = operand1;
operands_[2] = operand2;
operand_scale_ = operand_scale;
}
BytecodeNode::BytecodeNode(Bytecode bytecode, uint32_t operand0,
uint32_t operand1, uint32_t operand2,
uint32_t operand3, OperandScale operand_scale) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 4);
bytecode_ = bytecode;
operands_[0] = operand0;
operands_[1] = operand1;
operands_[2] = operand2;
operands_[3] = operand3;
operand_scale_ = operand_scale;
}
void BytecodeNode::set_bytecode(Bytecode bytecode) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 0);
bytecode_ = bytecode;
operand_scale_ = OperandScale::kSingle;
}
void BytecodeNode::set_bytecode(Bytecode bytecode, uint32_t operand0,
OperandScale operand_scale) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 1);
bytecode_ = bytecode;
operands_[0] = operand0;
operand_scale_ = operand_scale;
}
size_t BytecodeNode::Size() const {
size_t size = Bytecodes::Size(bytecode_, operand_scale_);
if (Bytecodes::OperandScaleRequiresPrefixBytecode(operand_scale_)) {
size += 1;
}
return size;
}
void BytecodeNode::Print(std::ostream& os) const {
#ifdef DEBUG
std::ios saved_state(nullptr);
saved_state.copyfmt(os);
os << Bytecodes::ToString(bytecode_);
if (Bytecodes::OperandScaleRequiresPrefixBytecode(operand_scale_)) {
Bytecode scale_prefix =
Bytecodes::OperandScaleToPrefixBytecode(operand_scale_);
os << '.' << Bytecodes::ToString(scale_prefix);
}
for (int i = 0; i < operand_count(); ++i) {
os << ' ' << std::setw(8) << std::setfill('0') << std::hex << operands_[i];
}
os.copyfmt(saved_state);
if (source_info_.is_valid()) {
os << source_info_;
}
os << '\n';
#else
os << static_cast<const void*>(this);
#endif // DEBUG
}
void BytecodeNode::Clone(const BytecodeNode* const other) {
memcpy(this, other, sizeof(*other));
}
bool BytecodeNode::operator==(const BytecodeNode& other) const {
if (this == &other) {
return true;
} else if (this->bytecode() != other.bytecode() ||
this->source_info() != other.source_info()) {
return false;
} else {
for (int i = 0; i < this->operand_count(); ++i) {
if (this->operand(i) != other.operand(i)) {
return false;
}
}
}
return true;
}
std::ostream& operator<<(std::ostream& os, const BytecodeNode& node) {
node.Print(os);
return os;
}
std::ostream& operator<<(std::ostream& os, const BytecodeSourceInfo& info) {
if (info.is_valid()) {
char description = info.is_statement() ? 'S' : 'E';
os << info.source_position() << ' ' << description << '>';
}
return os;
}
} // namespace interpreter
} // namespace internal
} // namespace v8
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_INTERPRETER_BYTECODE_PIPELINE_H_
#define V8_INTERPRETER_BYTECODE_PIPELINE_H_
#include "src/interpreter/bytecode-register-allocator.h"
#include "src/interpreter/bytecodes.h"
#include "src/zone-containers.h"
namespace v8 {
namespace internal {
namespace interpreter {
class BytecodeNode;
class BytecodeSourceInfo;
// Interface for bytecode pipeline stages.
class BytecodePipelineStage {
public:
virtual ~BytecodePipelineStage() {}
// Write bytecode node |node| into the pipeline. The node is only valid
// for the duration of the call. Callees should clone it if
// deferring Write() to the next stage.
virtual void Write(BytecodeNode* node) = 0;
// Flush state for bytecode array offset calculation. Returns the
// current size of bytecode array.
virtual size_t FlushForOffset() = 0;
// Flush state to terminate basic block.
virtual void FlushBasicBlock() = 0;
};
// Source code position information.
class BytecodeSourceInfo final {
public:
static const int kUninitializedPosition = -1;
BytecodeSourceInfo(int position = kUninitializedPosition,
bool is_statement = false)
: source_position_(position), is_statement_(is_statement) {}
// Combine later source info with current.
void Update(const BytecodeSourceInfo& entry);
int source_position() const {
DCHECK(is_valid());
return source_position_;
}
bool is_statement() const { return is_valid() && is_statement_; }
bool is_valid() const { return source_position_ != kUninitializedPosition; }
void set_invalid() { source_position_ = kUninitializedPosition; }
bool operator==(const BytecodeSourceInfo& other) const {
return source_position_ == other.source_position_ &&
is_statement_ == other.is_statement_;
}
bool operator!=(const BytecodeSourceInfo& other) const {
return source_position_ != other.source_position_ ||
is_statement_ != other.is_statement_;
}
private:
int source_position_;
bool is_statement_;
DISALLOW_COPY_AND_ASSIGN(BytecodeSourceInfo);
};
// A container for a generated bytecode, its operands, and source information.
// These must be allocated by a BytecodeNodeAllocator instance.
class BytecodeNode final : ZoneObject {
public:
explicit BytecodeNode(Bytecode bytecode = Bytecode::kIllegal);
BytecodeNode(Bytecode bytecode, uint32_t operand0,
OperandScale operand_scale);
BytecodeNode(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
OperandScale operand_scale);
BytecodeNode(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
uint32_t operand2, OperandScale operand_scale);
BytecodeNode(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
uint32_t operand2, uint32_t operand3,
OperandScale operand_scale);
void set_bytecode(Bytecode bytecode);
void set_bytecode(Bytecode bytecode, uint32_t operand0,
OperandScale operand_scale);
// Clone |other|.
void Clone(const BytecodeNode* const other);
// Print to stream |os|.
void Print(std::ostream& os) const;
// Return the size when this node is serialized to a bytecode array.
size_t Size() const;
Bytecode bytecode() const { return bytecode_; }
uint32_t operand(int i) const {
DCHECK_LT(i, operand_count());
return operands_[i];
}
uint32_t* operands() { return operands_; }
const uint32_t* operands() const { return operands_; }
int operand_count() const { return Bytecodes::NumberOfOperands(bytecode_); }
OperandScale operand_scale() const { return operand_scale_; }
const BytecodeSourceInfo& source_info() const { return source_info_; }
BytecodeSourceInfo& source_info() { return source_info_; }
bool operator==(const BytecodeNode& other) const;
bool operator!=(const BytecodeNode& other) const { return !(*this == other); }
private:
static const int kInvalidPosition = kMinInt;
static const size_t kMaxOperands = 4;
Bytecode bytecode_;
uint32_t operands_[kMaxOperands];
OperandScale operand_scale_;
BytecodeSourceInfo source_info_;
};
std::ostream& operator<<(std::ostream& os, const BytecodeSourceInfo& info);
std::ostream& operator<<(std::ostream& os, const BytecodeNode& node);
} // namespace interpreter
} // namespace internal
} // namespace v8
#endif // V8_INTERPRETER_BYTECODE_PIPELINE_H_
......@@ -41,6 +41,28 @@ struct OperandTraits {
OPERAND_TYPE_LIST(DECLARE_OPERAND_TYPE_TRAITS)
#undef DECLARE_OPERAND_TYPE_TRAITS
template <OperandType operand_type, OperandScale operand_scale>
struct OperandScaler {
template <bool, OperandSize, OperandScale>
struct Helper {
static const int kSize = 0;
};
template <OperandSize size, OperandScale scale>
struct Helper<false, size, scale> {
static const int kSize = static_cast<int>(size);
};
template <OperandSize size, OperandScale scale>
struct Helper<true, size, scale> {
static const int kSize = static_cast<int>(size) * static_cast<int>(scale);
};
static const int kSize =
Helper<OperandTraits<operand_type>::TypeInfo::kIsScalable,
OperandTraits<operand_type>::TypeInfo::kUnscaledSize,
operand_scale>::kSize;
static const OperandSize kOperandSize = static_cast<OperandSize>(kSize);
};
template <OperandType>
struct RegisterOperandTraits {
static const int kIsRegisterOperand = 0;
......@@ -61,11 +83,30 @@ template <AccumulatorUse accumulator_use, OperandType operand_0,
OperandType operand_1, OperandType operand_2, OperandType operand_3>
struct BytecodeTraits<accumulator_use, operand_0, operand_1, operand_2,
operand_3> {
static OperandType GetOperandType(int i) {
DCHECK(0 <= i && i < kOperandCount);
const OperandType kOperands[] = {operand_0, operand_1, operand_2,
operand_3};
return kOperands[i];
static const OperandType* GetOperandTypes() {
static const OperandType operand_types[] = {operand_0, operand_1, operand_2,
operand_3, OperandType::kNone};
return operand_types;
}
static OperandSize GetOperandSize(int i, OperandScale operand_scale) {
switch (operand_scale) {
#define CASE(Name, _) \
case OperandScale::k##Name: { \
static const OperandSize kOperandSizes[] = { \
OperandScaler<operand_0, OperandScale::k##Name>::kOperandSize, \
OperandScaler<operand_1, OperandScale::k##Name>::kOperandSize, \
OperandScaler<operand_2, OperandScale::k##Name>::kOperandSize, \
OperandScaler<operand_3, OperandScale::k##Name>::kOperandSize, \
}; \
DCHECK_LT(static_cast<size_t>(i), arraysize(kOperandSizes)); \
return kOperandSizes[i]; \
}
OPERAND_SCALE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return OperandSize::kNone;
}
template <OperandType ot>
......@@ -98,10 +139,29 @@ struct BytecodeTraits<accumulator_use, operand_0, operand_1, operand_2,
template <AccumulatorUse accumulator_use, OperandType operand_0,
OperandType operand_1, OperandType operand_2>
struct BytecodeTraits<accumulator_use, operand_0, operand_1, operand_2> {
static inline OperandType GetOperandType(int i) {
DCHECK(0 <= i && i <= 2);
const OperandType kOperands[] = {operand_0, operand_1, operand_2};
return kOperands[i];
static const OperandType* GetOperandTypes() {
static const OperandType operand_types[] = {operand_0, operand_1, operand_2,
OperandType::kNone};
return operand_types;
}
static OperandSize GetOperandSize(int i, OperandScale operand_scale) {
switch (operand_scale) {
#define CASE(Name, _) \
case OperandScale::k##Name: { \
static const OperandSize kOperandSizes[] = { \
OperandScaler<operand_0, OperandScale::k##Name>::kOperandSize, \
OperandScaler<operand_1, OperandScale::k##Name>::kOperandSize, \
OperandScaler<operand_2, OperandScale::k##Name>::kOperandSize, \
}; \
DCHECK_LT(static_cast<size_t>(i), arraysize(kOperandSizes)); \
return kOperandSizes[i]; \
}
OPERAND_SCALE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return OperandSize::kNone;
}
template <OperandType ot>
......@@ -130,10 +190,28 @@ struct BytecodeTraits<accumulator_use, operand_0, operand_1, operand_2> {
template <AccumulatorUse accumulator_use, OperandType operand_0,
OperandType operand_1>
struct BytecodeTraits<accumulator_use, operand_0, operand_1> {
static inline OperandType GetOperandType(int i) {
DCHECK(0 <= i && i < kOperandCount);
const OperandType kOperands[] = {operand_0, operand_1};
return kOperands[i];
static const OperandType* GetOperandTypes() {
static const OperandType operand_types[] = {operand_0, operand_1,
OperandType::kNone};
return operand_types;
}
static OperandSize GetOperandSize(int i, OperandScale operand_scale) {
switch (operand_scale) {
#define CASE(Name, _) \
case OperandScale::k##Name: { \
static const OperandSize kOperandSizes[] = { \
OperandScaler<operand_0, OperandScale::k##Name>::kOperandSize, \
OperandScaler<operand_1, OperandScale::k##Name>::kOperandSize, \
}; \
DCHECK_LT(static_cast<size_t>(i), arraysize(kOperandSizes)); \
return kOperandSizes[i]; \
}
OPERAND_SCALE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return OperandSize::kNone;
}
template <OperandType ot>
......@@ -158,9 +236,26 @@ struct BytecodeTraits<accumulator_use, operand_0, operand_1> {
template <AccumulatorUse accumulator_use, OperandType operand_0>
struct BytecodeTraits<accumulator_use, operand_0> {
static inline OperandType GetOperandType(int i) {
DCHECK(i == 0);
return operand_0;
static const OperandType* GetOperandTypes() {
static const OperandType operand_types[] = {operand_0, OperandType::kNone};
return operand_types;
}
static OperandSize GetOperandSize(int i, OperandScale operand_scale) {
switch (operand_scale) {
#define CASE(Name, _) \
case OperandScale::k##Name: { \
static const OperandSize kOperandSizes[] = { \
OperandScaler<operand_0, OperandScale::k##Name>::kOperandSize, \
}; \
DCHECK_LT(static_cast<size_t>(i), arraysize(kOperandSizes)); \
return kOperandSizes[i]; \
}
OPERAND_SCALE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return OperandSize::kNone;
}
template <OperandType ot>
......@@ -182,9 +277,14 @@ struct BytecodeTraits<accumulator_use, operand_0> {
template <AccumulatorUse accumulator_use>
struct BytecodeTraits<accumulator_use> {
static inline OperandType GetOperandType(int i) {
static const OperandType* GetOperandTypes() {
static const OperandType operand_types[] = {OperandType::kNone};
return operand_types;
}
static OperandSize GetOperandSize(int i, OperandScale operand_scale) {
UNREACHABLE();
return OperandType::kNone;
return OperandSize::kNone;
}
template <OperandType ot>
......@@ -200,37 +300,22 @@ struct BytecodeTraits<accumulator_use> {
static const int kRegisterOperandBitmap = 0;
};
template <bool>
struct OperandScaler {
static int Multiply(int size, int operand_scale) { return 0; }
};
template <>
struct OperandScaler<false> {
static int Multiply(int size, int operand_scale) { return size; }
};
template <>
struct OperandScaler<true> {
static int Multiply(int size, int operand_scale) {
return size * operand_scale;
}
};
static OperandSize ScaledOperandSize(OperandType operand_type,
OperandScale operand_scale) {
STATIC_ASSERT(static_cast<int>(OperandScale::kQuadruple) == 4 &&
OperandScale::kLast == OperandScale::kQuadruple);
int index = static_cast<int>(operand_scale) >> 1;
switch (operand_type) {
#define CASE(Name, TypeInfo) \
case OperandType::k##Name: { \
OperandSize base_size = OperandTypeInfoTraits<TypeInfo>::kUnscaledSize; \
int size = \
OperandScaler<OperandTypeInfoTraits<TypeInfo>::kIsScalable>::Multiply( \
static_cast<int>(base_size), static_cast<int>(operand_scale)); \
OperandSize operand_size = static_cast<OperandSize>(size); \
DCHECK(operand_size == OperandSize::kByte || \
operand_size == OperandSize::kShort || \
operand_size == OperandSize::kQuad); \
return operand_size; \
#define CASE(Name, TypeInfo) \
case OperandType::k##Name: { \
static const OperandSize kOperandSizes[] = { \
OperandScaler<OperandType::k##Name, \
OperandScale::kSingle>::kOperandSize, \
OperandScaler<OperandType::k##Name, \
OperandScale::kDouble>::kOperandSize, \
OperandScaler<OperandType::k##Name, \
OperandScale::kQuadruple>::kOperandSize}; \
return kOperandSizes[index]; \
}
OPERAND_TYPE_LIST(CASE)
#undef CASE
......
......@@ -74,15 +74,13 @@ const char* Bytecodes::OperandTypeToString(OperandType operand_type) {
// static
const char* Bytecodes::OperandScaleToString(OperandScale operand_scale) {
switch (operand_scale) {
case OperandScale::kSingle:
return "Single";
case OperandScale::kDouble:
return "Double";
case OperandScale::kQuadruple:
return "Quadruple";
case OperandScale::kInvalid:
UNREACHABLE();
#define CASE(Name, _) \
case OperandScale::k##Name: \
return #Name;
OPERAND_SCALE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return "";
}
......@@ -240,25 +238,81 @@ bool Bytecodes::WritesAccumulator(Bytecode bytecode) {
AccumulatorUse::kWrite;
}
// static
bool Bytecodes::WritesBooleanToAccumulator(Bytecode bytecode) {
switch (bytecode) {
case Bytecode::kLdaTrue:
case Bytecode::kLdaFalse:
case Bytecode::kLogicalNot:
case Bytecode::kTestEqual:
case Bytecode::kTestNotEqual:
case Bytecode::kTestEqualStrict:
case Bytecode::kTestLessThan:
case Bytecode::kTestLessThanOrEqual:
case Bytecode::kTestGreaterThan:
case Bytecode::kTestGreaterThanOrEqual:
case Bytecode::kTestInstanceOf:
case Bytecode::kTestIn:
case Bytecode::kForInDone:
return true;
default:
return false;
}
}
// static
bool Bytecodes::IsAccumulatorLoadWithoutEffects(Bytecode bytecode) {
switch (bytecode) {
case Bytecode::kLdaZero:
case Bytecode::kLdaSmi:
case Bytecode::kLdaUndefined:
case Bytecode::kLdaNull:
case Bytecode::kLdaTheHole:
case Bytecode::kLdaTrue:
case Bytecode::kLdaFalse:
case Bytecode::kLdaConstant:
case Bytecode::kLdar:
return true;
default:
return false;
}
}
// static
OperandType Bytecodes::GetOperandType(Bytecode bytecode, int i) {
DCHECK_LE(bytecode, Bytecode::kLast);
DCHECK_LT(i, NumberOfOperands(bytecode));
DCHECK_GE(i, 0);
return GetOperandTypes(bytecode)[i];
}
// static
const OperandType* Bytecodes::GetOperandTypes(Bytecode bytecode) {
DCHECK(bytecode <= Bytecode::kLast);
switch (bytecode) {
#define CASE(Name, ...) \
case Bytecode::k##Name: \
return BytecodeTraits<__VA_ARGS__>::GetOperandType(i);
return BytecodeTraits<__VA_ARGS__>::GetOperandTypes();
BYTECODE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return OperandType::kNone;
return nullptr;
}
// static
OperandSize Bytecodes::GetOperandSize(Bytecode bytecode, int i,
OperandScale operand_scale) {
OperandType op_type = GetOperandType(bytecode, i);
return ScaledOperandSize(op_type, operand_scale);
DCHECK(bytecode <= Bytecode::kLast);
switch (bytecode) {
#define CASE(Name, ...) \
case Bytecode::k##Name: \
return BytecodeTraits<__VA_ARGS__>::GetOperandSize(i, operand_scale);
BYTECODE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return OperandSize::kNone;
}
// static
......@@ -279,6 +333,7 @@ int Bytecodes::GetRegisterOperandBitmap(Bytecode bytecode) {
// static
int Bytecodes::GetOperandOffset(Bytecode bytecode, int i,
OperandScale operand_scale) {
DCHECK_LT(i, Bytecodes::NumberOfOperands(bytecode));
// TODO(oth): restore this to a statically determined constant.
int offset = 1;
for (int operand_index = 0; operand_index < i; ++operand_index) {
......@@ -343,6 +398,31 @@ bool Bytecodes::IsJump(Bytecode bytecode) {
return IsJumpImmediate(bytecode) || IsJumpConstant(bytecode);
}
// static
bool Bytecodes::IsJumpIfToBoolean(Bytecode bytecode) {
return bytecode == Bytecode::kJumpIfToBooleanTrue ||
bytecode == Bytecode::kJumpIfToBooleanFalse ||
bytecode == Bytecode::kJumpIfToBooleanTrueConstant ||
bytecode == Bytecode::kJumpIfToBooleanFalseConstant;
}
// static
Bytecode Bytecodes::GetJumpWithoutToBoolean(Bytecode bytecode) {
switch (bytecode) {
case Bytecode::kJumpIfToBooleanTrue:
return Bytecode::kJumpIfTrue;
case Bytecode::kJumpIfToBooleanFalse:
return Bytecode::kJumpIfFalse;
case Bytecode::kJumpIfToBooleanTrueConstant:
return Bytecode::kJumpIfTrueConstant;
case Bytecode::kJumpIfToBooleanFalseConstant:
return Bytecode::kJumpIfFalseConstant;
default:
break;
}
UNREACHABLE();
return Bytecode::kIllegal;
}
// static
bool Bytecodes::IsCallOrNew(Bytecode bytecode) {
......@@ -370,6 +450,11 @@ bool Bytecodes::IsDebugBreak(Bytecode bytecode) {
return false;
}
// static
bool Bytecodes::IsLdarOrStar(Bytecode bytecode) {
return bytecode == Bytecode::kLdar || bytecode == Bytecode::kStar;
}
// static
bool Bytecodes::IsBytecodeWithScalableOperands(Bytecode bytecode) {
switch (bytecode) {
......@@ -460,6 +545,24 @@ bool Bytecodes::IsRegisterOutputOperandType(OperandType operand_type) {
return false;
}
// static
int Bytecodes::GetNumberOfRegistersRepresentedBy(OperandType operand_type) {
switch (operand_type) {
case OperandType::kMaybeReg:
case OperandType::kReg:
case OperandType::kRegOut:
return 1;
case OperandType::kRegPair:
case OperandType::kRegOutPair:
return 2;
case OperandType::kRegOutTriple:
return 3;
default:
UNREACHABLE();
}
return 0;
}
// static
bool Bytecodes::IsUnsignedOperandType(OperandType operand_type) {
switch (operand_type) {
......@@ -474,10 +577,62 @@ bool Bytecodes::IsUnsignedOperandType(OperandType operand_type) {
}
// static
OperandScale Bytecodes::NextOperandScale(OperandScale operand_scale) {
DCHECK(operand_scale >= OperandScale::kSingle &&
operand_scale <= OperandScale::kMaxValid);
return static_cast<OperandScale>(2 * static_cast<int>(operand_scale));
OperandSize Bytecodes::SizeForSignedOperand(int value) {
if (kMinInt8 <= value && value <= kMaxInt8) {
return OperandSize::kByte;
} else if (kMinInt16 <= value && value <= kMaxInt16) {
return OperandSize::kShort;
} else {
return OperandSize::kQuad;
}
}
// static
OperandSize Bytecodes::SizeForUnsignedOperand(int value) {
DCHECK_GE(value, 0);
if (value <= kMaxUInt8) {
return OperandSize::kByte;
} else if (value <= kMaxUInt16) {
return OperandSize::kShort;
} else {
return OperandSize::kQuad;
}
}
OperandSize Bytecodes::SizeForUnsignedOperand(size_t value) {
if (value <= static_cast<size_t>(kMaxUInt8)) {
return OperandSize::kByte;
} else if (value <= static_cast<size_t>(kMaxUInt16)) {
return OperandSize::kShort;
} else if (value <= kMaxUInt32) {
return OperandSize::kQuad;
} else {
UNREACHABLE();
return OperandSize::kQuad;
}
}
OperandScale Bytecodes::OperandSizesToScale(OperandSize size0,
OperandSize size1,
OperandSize size2,
OperandSize size3) {
OperandSize upper = std::max(size0, size1);
OperandSize lower = std::max(size2, size3);
OperandSize result = std::max(upper, lower);
// Operand sizes have been scaled before calling this function.
// Currently all scalable operands are byte sized at
// OperandScale::kSingle.
STATIC_ASSERT(static_cast<int>(OperandSize::kByte) ==
static_cast<int>(OperandScale::kSingle) &&
static_cast<int>(OperandSize::kShort) ==
static_cast<int>(OperandScale::kDouble) &&
static_cast<int>(OperandSize::kQuad) ==
static_cast<int>(OperandScale::kQuadruple));
OperandScale operand_scale = static_cast<OperandScale>(result);
DCHECK(operand_scale == OperandScale::kSingle ||
operand_scale == OperandScale::kDouble ||
operand_scale == OperandScale::kQuadruple);
return operand_scale;
}
// static
......
......@@ -98,7 +98,7 @@ namespace interpreter {
OperandType::kIdx) \
\
/* Context operations */ \
V(PushContext, AccumulatorUse::kRead, OperandType::kReg) \
V(PushContext, AccumulatorUse::kRead, OperandType::kRegOut) \
V(PopContext, AccumulatorUse::kNone, OperandType::kReg) \
V(LdaContextSlot, AccumulatorUse::kWrite, OperandType::kReg, \
OperandType::kIdx) \
......@@ -250,7 +250,11 @@ namespace interpreter {
DEBUG_BREAK_BYTECODE_LIST(V) \
\
/* Illegal bytecode (terminates execution) */ \
V(Illegal, AccumulatorUse::kNone)
V(Illegal, AccumulatorUse::kNone) \
\
/* No operation (used to maintain source positions for peephole */ \
/* eliminated bytecodes). */ \
V(Nop, AccumulatorUse::kNone)
enum class AccumulatorUse : uint8_t {
kNone = 0,
......@@ -271,12 +275,16 @@ V8_INLINE AccumulatorUse operator|(AccumulatorUse lhs, AccumulatorUse rhs) {
// Enumeration of scaling factors applicable to scalable operands. Code
// relies on being able to cast values to integer scaling values.
#define OPERAND_SCALE_LIST(V) \
V(Single, 1) \
V(Double, 2) \
V(Quadruple, 4)
enum class OperandScale : uint8_t {
kSingle = 1,
kDouble = 2,
kQuadruple = 4,
kMaxValid = kQuadruple,
kInvalid = 8,
#define DECLARE_OPERAND_SCALE(Name, Scale) k##Name = Scale,
OPERAND_SCALE_LIST(DECLARE_OPERAND_SCALE)
#undef DECLARE_OPERAND_SCALE
kLast = kQuadruple
};
// Enumeration of the size classes of operand types used by
......@@ -333,7 +341,7 @@ enum class Bytecode : uint8_t {
// An interpreter Register which is located in the function's Register file
// in its stack frame. Registers hold parameters, this, and expression values.
class Register {
class Register final {
public:
explicit Register(int index = kInvalidIndex) : index_(index) {}
......@@ -464,9 +472,20 @@ class Bytecodes {
// Returns true if |bytecode| writes the accumulator.
static bool WritesAccumulator(Bytecode bytecode);
// Return true if |bytecode| writes the accumulator with a boolean value.
static bool WritesBooleanToAccumulator(Bytecode bytecode);
// Return true if |bytecode| is an accumulator load without side effects,
// e.g. LdaConstant, LdaTrue, Ldar.
static bool IsAccumulatorLoadWithoutEffects(Bytecode bytecode);
// Returns the i-th operand of |bytecode|.
static OperandType GetOperandType(Bytecode bytecode, int i);
// Returns a pointer to an array of operand types terminated in
// OperandType::kNone.
static const OperandType* GetOperandTypes(Bytecode bytecode);
// Returns the size of the i-th operand of |bytecode|.
static OperandSize GetOperandSize(Bytecode bytecode, int i,
OperandScale operand_scale);
......@@ -514,6 +533,13 @@ class Bytecodes {
// any kind of operand.
static bool IsJump(Bytecode bytecode);
// Returns true if the bytecode is a jump that internally coerces the
// accumulator to a boolean.
static bool IsJumpIfToBoolean(Bytecode bytecode);
// Returns the equivalent jump bytecode without the accumulator coercion.
static Bytecode GetJumpWithoutToBoolean(Bytecode bytecode);
// Returns true if the bytecode is a conditional jump, a jump, or a return.
static bool IsJumpOrReturn(Bytecode bytecode);
......@@ -526,6 +552,9 @@ class Bytecodes {
// Returns true if the bytecode is a debug break.
static bool IsDebugBreak(Bytecode bytecode);
// Returns true if the bytecode is Ldar or Star.
static bool IsLdarOrStar(Bytecode bytecode);
// Returns true if the bytecode has wider operand forms.
static bool IsBytecodeWithScalableOperands(Bytecode bytecode);
......@@ -541,6 +570,10 @@ class Bytecodes {
// Returns true if |operand_type| represents a register used as an output.
static bool IsRegisterOutputOperandType(OperandType operand_type);
// Returns the number of registers represented by a register operand. For
// instance, a RegPair represents two registers.
static int GetNumberOfRegistersRepresentedBy(OperandType operand_type);
// Returns true if |operand_type| is a maybe register operand
// (kMaybeReg).
static bool IsMaybeRegisterOperandType(OperandType operand_type);
......@@ -576,8 +609,21 @@ class Bytecodes {
// OperandScale values.
static bool BytecodeHasHandler(Bytecode bytecode, OperandScale operand_scale);
// Return the next larger operand scale.
static OperandScale NextOperandScale(OperandScale operand_scale);
// Return the operand size required to hold a signed operand.
static OperandSize SizeForSignedOperand(int value);
// Return the operand size required to hold an unsigned operand.
static OperandSize SizeForUnsignedOperand(int value);
// Return the operand size required to hold an unsigned operand.
static OperandSize SizeForUnsignedOperand(size_t value);
// Return the OperandScale required for bytecode emission of the
// given operand sizes.
static OperandScale OperandSizesToScale(
OperandSize size0, OperandSize size1 = OperandSize::kByte,
OperandSize size2 = OperandSize::kByte,
OperandSize size3 = OperandSize::kByte);
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(Bytecodes);
......
......@@ -45,9 +45,13 @@ void Interpreter::Initialize() {
}
// Generate bytecode handlers for all bytecodes and scales.
for (OperandScale operand_scale = OperandScale::kSingle;
operand_scale <= OperandScale::kMaxValid;
operand_scale = Bytecodes::NextOperandScale(operand_scale)) {
const OperandScale kOperandScales[] = {
#define VALUE(Name, _) OperandScale::k##Name,
OPERAND_SCALE_LIST(VALUE)
#undef VALUE
};
for (OperandScale operand_scale : kOperandScales) {
#define GENERATE_CODE(Name, ...) \
{ \
if (Bytecodes::BytecodeHasHandler(Bytecode::k##Name, operand_scale)) { \
......@@ -93,12 +97,16 @@ size_t Interpreter::GetDispatchTableIndex(Bytecode bytecode,
OperandScale operand_scale) {
static const size_t kEntriesPerOperandScale = 1u << kBitsPerByte;
size_t index = static_cast<size_t>(bytecode);
OperandScale current_scale = OperandScale::kSingle;
while (current_scale != operand_scale) {
index += kEntriesPerOperandScale;
current_scale = Bytecodes::NextOperandScale(current_scale);
switch (operand_scale) {
case OperandScale::kSingle:
return index;
case OperandScale::kDouble:
return index + kEntriesPerOperandScale;
case OperandScale::kQuadruple:
return index + 2 * kEntriesPerOperandScale;
}
return index;
UNREACHABLE();
return 0;
}
void Interpreter::IterateDispatchTable(ObjectVisitor* v) {
......@@ -1767,6 +1775,11 @@ void Interpreter::DoIllegal(InterpreterAssembler* assembler) {
__ Abort(kInvalidBytecode);
}
// Nop
//
// No operation.
void Interpreter::DoNop(InterpreterAssembler* assembler) { __ Dispatch(); }
// SuspendGenerator <generator>
//
// Exports the register file and stores it into the generator. Also stores the
......
......@@ -115,53 +115,34 @@ void DecodeEntry(ByteArray* bytes, int* index, PositionTableEntry* entry) {
} // namespace
void SourcePositionTableBuilder::AddStatementPosition(size_t bytecode_offset,
int source_position) {
void SourcePositionTableBuilder::AddPosition(size_t bytecode_offset,
int source_position,
bool is_statement) {
int offset = static_cast<int>(bytecode_offset);
AddEntry({offset, source_position, true});
}
void SourcePositionTableBuilder::AddExpressionPosition(size_t bytecode_offset,
int source_position) {
int offset = static_cast<int>(bytecode_offset);
AddEntry({offset, source_position, false});
AddEntry({offset, source_position, is_statement});
}
void SourcePositionTableBuilder::AddEntry(const PositionTableEntry& entry) {
// Don't encode a new entry if this bytecode already has a source position
// assigned.
if (candidate_.bytecode_offset == entry.bytecode_offset) {
if (entry.is_statement) candidate_ = entry;
return;
}
CommitEntry();
candidate_ = entry;
}
void SourcePositionTableBuilder::CommitEntry() {
if (candidate_.bytecode_offset == kUninitializedCandidateOffset) return;
PositionTableEntry tmp(candidate_);
PositionTableEntry tmp(entry);
SubtractFromEntry(tmp, previous_);
EncodeEntry(bytes_, tmp);
previous_ = candidate_;
previous_ = entry;
if (candidate_.is_statement) {
if (entry.is_statement) {
LOG_CODE_EVENT(isolate_, CodeLinePosInfoAddStatementPositionEvent(
jit_handler_data_, candidate_.bytecode_offset,
candidate_.source_position));
jit_handler_data_, entry.bytecode_offset,
entry.source_position));
}
LOG_CODE_EVENT(isolate_, CodeLinePosInfoAddPositionEvent(
jit_handler_data_, candidate_.bytecode_offset,
candidate_.source_position));
jit_handler_data_, entry.bytecode_offset,
entry.source_position));
#ifdef ENABLE_SLOW_DCHECKS
raw_entries_.push_back(candidate_);
raw_entries_.push_back(entry);
#endif
}
Handle<ByteArray> SourcePositionTableBuilder::ToSourcePositionTable() {
CommitEntry();
if (bytes_.empty()) return isolate_->factory()->empty_byte_array();
Handle<ByteArray> table = isolate_->factory()->NewByteArray(
......
......@@ -34,7 +34,7 @@ struct PositionTableEntry {
bool is_statement;
};
class SourcePositionTableBuilder : public PositionsRecorder {
class SourcePositionTableBuilder final : public PositionsRecorder {
public:
SourcePositionTableBuilder(Isolate* isolate, Zone* zone)
: isolate_(isolate),
......@@ -42,16 +42,14 @@ class SourcePositionTableBuilder : public PositionsRecorder {
#ifdef ENABLE_SLOW_DCHECKS
raw_entries_(zone),
#endif
candidate_(kUninitializedCandidateOffset, 0, false) {
previous_() {
}
void AddStatementPosition(size_t bytecode_offset, int source_position);
void AddExpressionPosition(size_t bytecode_offset, int source_position);
void AddPosition(size_t bytecode_offset, int source_position,
bool is_statement);
Handle<ByteArray> ToSourcePositionTable();
private:
static const int kUninitializedCandidateOffset = -1;
void AddEntry(const PositionTableEntry& entry);
void CommitEntry();
......@@ -60,7 +58,6 @@ class SourcePositionTableBuilder : public PositionsRecorder {
#ifdef ENABLE_SLOW_DCHECKS
ZoneVector<PositionTableEntry> raw_entries_;
#endif
PositionTableEntry candidate_; // Next entry to be written, if initialized.
PositionTableEntry previous_; // Previously written entry, to compute delta.
};
......
......@@ -1580,12 +1580,15 @@ void Logger::LogCodeObjects() {
void Logger::LogBytecodeHandlers() {
if (!FLAG_ignition) return;
interpreter::Interpreter* interpreter = isolate_->interpreter();
const interpreter::OperandScale kOperandScales[] = {
#define VALUE(Name, _) interpreter::OperandScale::k##Name,
OPERAND_SCALE_LIST(VALUE)
#undef VALUE
};
const int last_index = static_cast<int>(interpreter::Bytecode::kLast);
for (auto operand_scale = interpreter::OperandScale::kSingle;
operand_scale <= interpreter::OperandScale::kMaxValid;
operand_scale =
interpreter::Bytecodes::NextOperandScale(operand_scale)) {
interpreter::Interpreter* interpreter = isolate_->interpreter();
for (auto operand_scale : kOperandScales) {
for (int index = 0; index <= last_index; ++index) {
interpreter::Bytecode bytecode = interpreter::Bytecodes::FromByte(index);
if (interpreter::Bytecodes::BytecodeHasHandler(bytecode, operand_scale)) {
......
......@@ -894,6 +894,12 @@
'interpreter/bytecode-array-builder.h',
'interpreter/bytecode-array-iterator.cc',
'interpreter/bytecode-array-iterator.h',
'interpreter/bytecode-array-writer.cc',
'interpreter/bytecode-array-writer.h',
'interpreter/bytecode-peephole-optimizer.cc',
'interpreter/bytecode-peephole-optimizer.h',
'interpreter/bytecode-pipeline.cc',
'interpreter/bytecode-pipeline.h',
'interpreter/bytecode-register-allocator.cc',
'interpreter/bytecode-register-allocator.h',
'interpreter/bytecode-generator.cc',
......
......@@ -196,7 +196,7 @@ snippet: "
class C { constructor() { count++; }}
return new C();
"
frame size: 10
frame size: 7
parameter count: 1
bytecode array length: 74
bytecodes: [
......
......@@ -15,7 +15,7 @@ snippet: "
"
frame size: 11
parameter count: 1
bytecode array length: 197
bytecode array length: 195
bytecodes: [
B(Ldar), R(new_target),
B(JumpIfUndefined), U8(12),
......@@ -64,10 +64,9 @@ bytecodes: [
B(Star), R(3),
B(LdaZero),
B(Star), R(2),
B(Jump), U8(38),
B(Jump), U8(36),
B(Ldar), R(7),
B(Throw),
B(Ldar), R(7),
B(LdaUndefined),
B(Star), R(5),
B(LdaTrue),
......@@ -111,7 +110,7 @@ bytecodes: [
constant pool: [
]
handlers: [
[30, 133, 139],
[30, 131, 137],
]
---
......@@ -120,7 +119,7 @@ snippet: "
"
frame size: 11
parameter count: 1
bytecode array length: 293
bytecode array length: 289
bytecodes: [
B(Ldar), R(new_target),
B(JumpIfUndefined), U8(18),
......@@ -131,7 +130,7 @@ bytecodes: [
B(JumpIfTrue), U8(55),
B(LdaSmi), U8(1),
B(TestEqualStrict), R(1),
B(JumpIfTrueConstant), U8(0),
B(JumpIfTrue), U8(127),
B(Illegal),
B(CallRuntime), U16(Runtime::kNewFunctionContext), R(closure), U8(1),
B(PushContext), R(0),
......@@ -172,10 +171,9 @@ bytecodes: [
B(Star), R(3),
B(LdaZero),
B(Star), R(2),
B(Jump), U8(119),
B(Jump), U8(115),
B(Ldar), R(7),
B(Throw),
B(Ldar), R(7),
/* 16 S> */ B(LdaSmi), U8(42),
B(Star), R(5),
B(LdaFalse),
......@@ -208,10 +206,9 @@ bytecodes: [
B(Star), R(3),
B(LdaSmi), U8(1),
B(Star), R(2),
B(Jump), U8(38),
B(Jump), U8(36),
B(Ldar), R(6),
B(Throw),
B(Ldar), R(6),
B(LdaUndefined),
B(Star), R(5),
B(LdaTrue),
......@@ -258,10 +255,9 @@ bytecodes: [
/* 25 S> */ B(Return),
]
constant pool: [
kInstanceTypeDontCare,
]
handlers: [
[36, 220, 226],
[36, 216, 222],
]
---
......@@ -270,7 +266,7 @@ snippet: "
"
frame size: 17
parameter count: 1
bytecode array length: 794
bytecode array length: 792
bytecodes: [
B(Ldar), R(new_target),
B(JumpIfUndefined), U8(18),
......@@ -325,7 +321,6 @@ bytecodes: [
B(JumpConstant), U8(17),
B(Ldar), R(9),
B(Throw),
B(Ldar), R(9),
B(LdaConstant), U8(0),
B(Star), R(7),
B(Ldar), R(closure),
......@@ -634,9 +629,9 @@ constant pool: [
kInstanceTypeDontCare,
]
handlers: [
[36, 712, 718],
[150, 448, 454],
[153, 399, 401],
[556, 571, 573],
[36, 710, 716],
[148, 446, 452],
[151, 397, 399],
[554, 569, 571],
]
......@@ -119,7 +119,7 @@ snippet: "
"
frame size: 3
parameter count: 1
bytecode array length: 32
bytecode array length: 24
bytecodes: [
/* 30 E> */ B(StackCheck),
/* 42 S> */ B(LdaSmi), U8(2),
......@@ -129,12 +129,8 @@ bytecodes: [
/* 56 S> */ B(LdaSmi), U8(4),
/* 56 E> */ B(Star), R(2),
/* 59 S> */ B(Ldar), R(0),
B(JumpIfToBooleanTrue), U8(16),
/* 72 E> */ B(Ldar), R(0),
/* 75 E> */ B(Ldar), R(1),
/* 78 E> */ B(Ldar), R(0),
/* 81 E> */ B(Ldar), R(1),
B(LdaSmi), U8(5),
B(JumpIfToBooleanTrue), U8(8),
/* 81 E> */ B(LdaSmi), U8(5),
/* 86 E> */ B(Star), R(2),
B(LdaSmi), U8(3),
/* 95 S> */ B(Return),
......
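
The golden-file churn above is the peephole optimizer at work: runs of accumulator loads whose values are never read collapse to the final load (the Ldar/Ldar/.../LdaSmi sequence shrinking to a single LdaSmi), jump distances shrink accordingly, and a JumpIfTrueConstant drops back to a plain JumpIfTrue once the shortened offset fits in a byte operand. A hedged sketch of the load-collapsing rule, with illustrative names rather than the (collapsed) optimizer source:

enum class Op { kLdar, kLdaZero, kLdaSmi, kStar, kReturn };

// Writes only the accumulator and has no other effects.
static bool IsAccumulatorLoad(Op op) {
  return op == Op::kLdar || op == Op::kLdaZero || op == Op::kLdaSmi;
}

// The current load is dead if the next bytecode overwrites the accumulator
// without reading it first, so the optimizer can drop it.
static bool CanElideCurrent(Op current, Op next) {
  return IsAccumulatorLoad(current) && IsAccumulatorLoad(next);
}
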
......@@ -22,11 +22,16 @@ class BytecodeArrayBuilderTest : public TestWithIsolateAndZone {
TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
BytecodeArrayBuilder builder(isolate(), zone(), 0, 1, 131);
Factory* factory = isolate()->factory();
CHECK_EQ(builder.locals_count(), 131);
CHECK_EQ(builder.context_count(), 1);
CHECK_EQ(builder.fixed_register_count(), 132);
Register reg(0);
Register other(reg.index() + 1);
Register wide(128);
// Emit argument creation operations.
builder.CreateArguments(CreateArgumentsType::kMappedArguments)
.CreateArguments(CreateArgumentsType::kUnmappedArguments)
......@@ -34,19 +39,27 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
// Emit constant loads.
builder.LoadLiteral(Smi::FromInt(0))
.StoreAccumulatorInRegister(reg)
.LoadLiteral(Smi::FromInt(8))
.StoreAccumulatorInRegister(reg)
.LoadLiteral(Smi::FromInt(10000000))
.StoreAccumulatorInRegister(reg)
.LoadLiteral(factory->NewStringFromStaticChars("A constant"))
.StoreAccumulatorInRegister(reg)
.LoadUndefined()
.StoreAccumulatorInRegister(reg)
.LoadNull()
.StoreAccumulatorInRegister(reg)
.LoadTheHole()
.StoreAccumulatorInRegister(reg)
.LoadTrue()
.LoadFalse();
.StoreAccumulatorInRegister(reg)
.LoadFalse()
.StoreAccumulatorInRegister(wide);
Register reg(0);
Register other(reg.index() + 1);
Register wide(128);
builder.LoadAccumulatorWithRegister(reg)
builder.StackCheck(0)
.LoadAccumulatorWithRegister(other)
.StoreAccumulatorInRegister(reg)
.LoadNull()
.StoreAccumulatorInRegister(reg);
......@@ -55,7 +68,6 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
builder.MoveRegister(reg, wide);
// Emit global load / store operations.
Factory* factory = isolate()->factory();
Handle<String> name = factory->NewStringFromStaticChars("var_name");
builder.LoadGlobal(name, 1, TypeofMode::NOT_INSIDE_TYPEOF)
.LoadGlobal(name, 1, TypeofMode::INSIDE_TYPEOF)
......@@ -331,6 +343,9 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
// Insert entry for illegal bytecode as this is never willingly emitted.
scorecard[Bytecodes::ToByte(Bytecode::kIllegal)] = 1;
// Insert entry for nop bytecode as this often gets optimized out.
scorecard[Bytecodes::ToByte(Bytecode::kNop)] = 1;
// Check return occurs at the end and only once in the BytecodeArray.
CHECK_EQ(final_bytecode, Bytecode::kReturn);
CHECK_EQ(scorecard[Bytecodes::ToByte(final_bytecode)], 1);
......@@ -462,7 +477,7 @@ TEST_F(BytecodeArrayBuilderTest, ForwardJumps) {
.BinaryOperation(Token::Value::ADD, reg)
.JumpIfFalse(&far4);
for (int i = 0; i < kFarJumpDistance - 18; i++) {
builder.LoadUndefined();
builder.Debugger();
}
builder.Bind(&far0).Bind(&far1).Bind(&far2).Bind(&far3).Bind(&far4);
builder.Return();
......@@ -503,7 +518,6 @@ TEST_F(BytecodeArrayBuilderTest, ForwardJumps) {
CHECK_EQ(iterator.GetImmediateOperand(0), 2);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpConstant);
CHECK_EQ(*iterator.GetConstantForIndexOperand(0),
Smi::FromInt(kFarJumpDistance));
......@@ -569,7 +583,7 @@ TEST_F(BytecodeArrayBuilderTest, BackwardJumps) {
// Add padding to force wide backwards jumps.
for (int i = 0; i < 256; i++) {
builder.LoadTrue();
builder.Debugger();
}
builder.BinaryOperation(Token::Value::ADD, reg).JumpIfFalse(&label4);
......@@ -616,7 +630,7 @@ TEST_F(BytecodeArrayBuilderTest, BackwardJumps) {
}
// Check padding to force wide backwards jumps.
for (int i = 0; i < 256; i++) {
CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaTrue);
CHECK_EQ(iterator.current_bytecode(), Bytecode::kDebugger);
iterator.Advance();
}
// Ignore binary operation.
......@@ -707,85 +721,6 @@ TEST_F(BytecodeArrayBuilderTest, LabelAddressReuse) {
CHECK(iterator.done());
}
TEST_F(BytecodeArrayBuilderTest, OperandScales) {
CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(OperandSize::kByte),
OperandScale::kSingle);
CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(OperandSize::kShort),
OperandScale::kDouble);
CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(OperandSize::kQuad),
OperandScale::kQuadruple);
CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(
OperandSize::kShort, OperandSize::kShort, OperandSize::kShort,
OperandSize::kShort),
OperandScale::kDouble);
CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(
OperandSize::kQuad, OperandSize::kShort, OperandSize::kShort,
OperandSize::kShort),
OperandScale::kQuadruple);
CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(
OperandSize::kShort, OperandSize::kQuad, OperandSize::kShort,
OperandSize::kShort),
OperandScale::kQuadruple);
CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(
OperandSize::kShort, OperandSize::kShort, OperandSize::kQuad,
OperandSize::kShort),
OperandScale::kQuadruple);
CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(
OperandSize::kShort, OperandSize::kShort, OperandSize::kShort,
OperandSize::kQuad),
OperandScale::kQuadruple);
}
TEST_F(BytecodeArrayBuilderTest, SizesForSignOperands) {
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(0) == OperandSize::kByte);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMaxInt8) ==
OperandSize::kByte);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMinInt8) ==
OperandSize::kByte);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMaxInt8 + 1) ==
OperandSize::kShort);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMinInt8 - 1) ==
OperandSize::kShort);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMaxInt16) ==
OperandSize::kShort);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMinInt16) ==
OperandSize::kShort);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMaxInt16 + 1) ==
OperandSize::kQuad);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMinInt16 - 1) ==
OperandSize::kQuad);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMaxInt) ==
OperandSize::kQuad);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMinInt) ==
OperandSize::kQuad);
}
TEST_F(BytecodeArrayBuilderTest, SizesForUnsignOperands) {
// int overloads
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(0) == OperandSize::kByte);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(kMaxUInt8) ==
OperandSize::kByte);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(kMaxUInt8 + 1) ==
OperandSize::kShort);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(kMaxUInt16) ==
OperandSize::kShort);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(kMaxUInt16 + 1) ==
OperandSize::kQuad);
// size_t overloads
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(static_cast<size_t>(0)) ==
OperandSize::kByte);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(
static_cast<size_t>(kMaxUInt8)) == OperandSize::kByte);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(
static_cast<size_t>(kMaxUInt8 + 1)) == OperandSize::kShort);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(
static_cast<size_t>(kMaxUInt16)) == OperandSize::kShort);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(
static_cast<size_t>(kMaxUInt16 + 1)) == OperandSize::kQuad);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(
static_cast<size_t>(kMaxUInt32)) == OperandSize::kQuad);
}
} // namespace interpreter
} // namespace internal
} // namespace v8
......@@ -37,11 +37,17 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
int feedback_slot = 97;
builder.LoadLiteral(heap_num_0)
.StoreAccumulatorInRegister(reg_0)
.LoadLiteral(heap_num_1)
.StoreAccumulatorInRegister(reg_0)
.LoadLiteral(zero)
.StoreAccumulatorInRegister(reg_0)
.LoadLiteral(smi_0)
.StoreAccumulatorInRegister(reg_0)
.LoadLiteral(smi_1)
.StoreAccumulatorInRegister(reg_1)
.LoadAccumulatorWithRegister(reg_0)
.StoreAccumulatorInRegister(reg_1)
.LoadNamedProperty(reg_1, name, feedback_slot)
.StoreAccumulatorInRegister(param)
.CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, 1, reg_0)
......@@ -64,6 +70,15 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kStar);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
CHECK_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
CHECK_EQ(iterator.GetRegisterOperandRange(0), 1);
CHECK(!iterator.done());
offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
......@@ -72,6 +87,15 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kStar);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
CHECK_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
CHECK_EQ(iterator.GetRegisterOperandRange(0), 1);
CHECK(!iterator.done());
offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaZero);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
......@@ -79,6 +103,15 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
offset += Bytecodes::Size(Bytecode::kLdaZero, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kStar);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
CHECK_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
CHECK_EQ(iterator.GetRegisterOperandRange(0), 1);
CHECK(!iterator.done());
offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
......@@ -87,6 +120,15 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kStar);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
CHECK_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
CHECK_EQ(iterator.GetRegisterOperandRange(0), 1);
CHECK(!iterator.done());
offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kQuadruple);
......@@ -96,6 +138,15 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
kPrefixByteSize;
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kStar);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
CHECK_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
CHECK_EQ(iterator.GetRegisterOperandRange(0), 1);
CHECK(!iterator.done());
offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdar);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
......@@ -104,6 +155,15 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
offset += Bytecodes::Size(Bytecode::kLdar, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kStar);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
CHECK_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
CHECK_EQ(iterator.GetRegisterOperandRange(0), 1);
CHECK(!iterator.done());
offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kLoadIC);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
......
This diff is collapsed.
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/v8.h"
#include "src/interpreter/bytecode-pipeline.h"
#include "src/interpreter/bytecode-register-allocator.h"
#include "src/isolate.h"
#include "test/unittests/test-utils.h"
namespace v8 {
namespace internal {
namespace interpreter {
using BytecodeNodeTest = TestWithIsolateAndZone;
TEST(BytecodeSourceInfo, Operations) {
BytecodeSourceInfo x(0, true);
CHECK_EQ(x.source_position(), 0);
CHECK_EQ(x.is_statement(), true);
CHECK_EQ(x.is_valid(), true);
x.set_invalid();
CHECK_EQ(x.is_statement(), false);
CHECK_EQ(x.is_valid(), false);
x.Update({1, true});
BytecodeSourceInfo y(1, true);
CHECK(x == y);
CHECK(!(x != y));
x.set_invalid();
CHECK(!(x == y));
CHECK(x != y);
y.Update({2, false});
CHECK_EQ(y.source_position(), 1);
CHECK_EQ(y.is_statement(), true);
y.Update({2, true});
CHECK_EQ(y.source_position(), 2);
CHECK_EQ(y.is_statement(), true);
y.set_invalid();
y.Update({3, false});
CHECK_EQ(y.source_position(), 3);
CHECK_EQ(y.is_statement(), false);
y.Update({3, true});
CHECK_EQ(y.source_position(), 3);
CHECK_EQ(y.is_statement(), true);
}
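
Read together, the checks above pin down Update(): a statement position always replaces what is stored, while an expression position only fills a slot that is currently invalid. A hedged sketch of that rule (names are illustrative, not the actual class):

struct SourceInfoSketch {
  int position = -1;
  bool is_statement = false;

  bool is_valid() const { return position != -1; }

  void Update(int new_position, bool new_is_statement) {
    // Statement positions win; expression positions only fill an empty slot.
    if (new_is_statement || !is_valid()) {
      position = new_position;
      is_statement = new_is_statement;
    }
  }
};
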
TEST_F(BytecodeNodeTest, Constructor0) {
BytecodeNode node;
CHECK_EQ(node.bytecode(), Bytecode::kIllegal);
CHECK(!node.source_info().is_valid());
}
TEST_F(BytecodeNodeTest, Constructor1) {
BytecodeNode node(Bytecode::kLdaZero);
CHECK_EQ(node.bytecode(), Bytecode::kLdaZero);
CHECK_EQ(node.operand_count(), 0);
CHECK_EQ(node.operand_scale(), OperandScale::kSingle);
CHECK(!node.source_info().is_valid());
CHECK_EQ(node.Size(), 1);
}
TEST_F(BytecodeNodeTest, Constructor2) {
uint32_t operands[] = {0x11};
BytecodeNode node(Bytecode::kJumpIfTrue, operands[0], OperandScale::kDouble);
CHECK_EQ(node.bytecode(), Bytecode::kJumpIfTrue);
CHECK_EQ(node.operand_count(), 1);
CHECK_EQ(node.operand(0), operands[0]);
CHECK_EQ(node.operand_scale(), OperandScale::kDouble);
CHECK(!node.source_info().is_valid());
CHECK_EQ(node.Size(), 4);
}
TEST_F(BytecodeNodeTest, Constructor3) {
uint32_t operands[] = {0x11, 0x22};
BytecodeNode node(Bytecode::kLdaGlobal, operands[0], operands[1],
OperandScale::kQuadruple);
CHECK_EQ(node.bytecode(), Bytecode::kLdaGlobal);
CHECK_EQ(node.operand_count(), 2);
CHECK_EQ(node.operand(0), operands[0]);
CHECK_EQ(node.operand(1), operands[1]);
CHECK_EQ(node.operand_scale(), OperandScale::kQuadruple);
CHECK(!node.source_info().is_valid());
CHECK_EQ(node.Size(), 10);
}
TEST_F(BytecodeNodeTest, Constructor4) {
uint32_t operands[] = {0x11, 0x22, 0x33};
BytecodeNode node(Bytecode::kLoadIC, operands[0], operands[1], operands[2],
OperandScale::kSingle);
CHECK_EQ(node.operand_count(), 3);
CHECK_EQ(node.bytecode(), Bytecode::kLoadIC);
CHECK_EQ(node.operand(0), operands[0]);
CHECK_EQ(node.operand(1), operands[1]);
CHECK_EQ(node.operand(2), operands[2]);
CHECK_EQ(node.operand_scale(), OperandScale::kSingle);
CHECK(!node.source_info().is_valid());
CHECK_EQ(node.Size(), 4);
}
TEST_F(BytecodeNodeTest, Constructor5) {
uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
operands[3], OperandScale::kDouble);
CHECK_EQ(node.operand_count(), 4);
CHECK_EQ(node.bytecode(), Bytecode::kForInNext);
CHECK_EQ(node.operand(0), operands[0]);
CHECK_EQ(node.operand(1), operands[1]);
CHECK_EQ(node.operand(2), operands[2]);
CHECK_EQ(node.operand(3), operands[3]);
CHECK_EQ(node.operand_scale(), OperandScale::kDouble);
CHECK(!node.source_info().is_valid());
CHECK_EQ(node.Size(), 10);
}
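
The expected sizes in Constructor1..Constructor5 are consistent with a simple model, stated here as an inference from the checks rather than as the actual BytecodeNode::Size() code: one byte for the bytecode, operand_count bytes times the scale, plus one prefix byte (Wide/ExtraWide) whenever the scale is not kSingle.

static int ExpectedNodeSize(int operand_count, int operand_scale) {
  const int prefix = operand_scale > 1 ? 1 : 0;  // Wide / ExtraWide prefix byte
  return prefix + 1 + operand_count * operand_scale;
}
// ExpectedNodeSize(0, 1) == 1   (Constructor1)
// ExpectedNodeSize(1, 2) == 4   (Constructor2)
// ExpectedNodeSize(2, 4) == 10  (Constructor3)
// ExpectedNodeSize(3, 1) == 4   (Constructor4)
// ExpectedNodeSize(4, 2) == 10  (Constructor5)
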
TEST_F(BytecodeNodeTest, Equality) {
uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
operands[3], OperandScale::kDouble);
CHECK_EQ(node, node);
BytecodeNode other(Bytecode::kForInNext, operands[0], operands[1],
operands[2], operands[3], OperandScale::kDouble);
CHECK_EQ(node, other);
}
TEST_F(BytecodeNodeTest, EqualityWithSourceInfo) {
uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
operands[3], OperandScale::kDouble);
node.source_info().Update({3, true});
CHECK_EQ(node, node);
BytecodeNode other(Bytecode::kForInNext, operands[0], operands[1],
operands[2], operands[3], OperandScale::kDouble);
other.source_info().Update({3, true});
CHECK_EQ(node, other);
}
TEST_F(BytecodeNodeTest, NoEqualityWithDifferentSourceInfo) {
uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
operands[3], OperandScale::kDouble);
node.source_info().Update({3, true});
BytecodeNode other(Bytecode::kForInNext, operands[0], operands[1],
operands[2], operands[3], OperandScale::kDouble);
CHECK_NE(node, other);
}
TEST_F(BytecodeNodeTest, Clone) {
uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
operands[3], OperandScale::kDouble);
BytecodeNode clone;
clone.Clone(&node);
CHECK_EQ(clone, node);
}
TEST_F(BytecodeNodeTest, SetBytecode0) {
uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
operands[3], OperandScale::kDouble);
BytecodeSourceInfo source_info(77, false);
node.source_info().Update(source_info);
BytecodeNode clone;
clone.Clone(&node);
clone.set_bytecode(Bytecode::kNop);
CHECK_EQ(clone.bytecode(), Bytecode::kNop);
CHECK_EQ(clone.operand_count(), 0);
CHECK_EQ(clone.operand_scale(), OperandScale::kSingle);
CHECK_EQ(clone.source_info(), source_info);
}
TEST_F(BytecodeNodeTest, SetBytecode1) {
uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
operands[3], OperandScale::kDouble);
BytecodeSourceInfo source_info(77, false);
node.source_info().Update(source_info);
BytecodeNode clone;
clone.Clone(&node);
clone.set_bytecode(Bytecode::kJump, 0x01aabbcc, OperandScale::kQuadruple);
CHECK_EQ(clone.bytecode(), Bytecode::kJump);
CHECK_EQ(clone.operand_count(), 1);
CHECK_EQ(clone.operand(0), 0x01aabbcc);
CHECK_EQ(clone.operand_scale(), OperandScale::kQuadruple);
CHECK_EQ(clone.source_info(), source_info);
}
} // namespace interpreter
} // namespace internal
} // namespace v8
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_UNITTESTS_INTERPRETER_BYTECODE_UTILS_H_
#define V8_UNITTESTS_INTERPRETER_BYTECODE_UTILS_H_
#include "src/frames.h"
#if V8_TARGET_LITTLE_ENDIAN
#define EXTRACT(x, n) static_cast<uint8_t>((x) >> (8 * n))
#define U16(i) EXTRACT(i, 0), EXTRACT(i, 1)
#define U32(i) EXTRACT(i, 0), EXTRACT(i, 1), EXTRACT(i, 2), EXTRACT(i, 3)
#elif V8_TARGET_BIG_ENDIAN
#define EXTRACT(x, n) static_cast<uint8_t>((x) >> (8 * n))
#define U16(i) EXTRACT(i, 1), EXTRACT(i, 0)
#define U32(i) EXTRACT(i, 3), EXTRACT(i, 2), EXTRACT(i, 1), EXTRACT(i, 0)
#else
#error "Unknown Architecture"
#endif
#define U8(i) static_cast<uint8_t>(i)
#define B(Name) static_cast<uint8_t>(Bytecode::k##Name)
#define REG_OPERAND(i) \
(InterpreterFrameConstants::kRegisterFileFromFp / kPointerSize - (i))
#define R8(i) static_cast<uint8_t>(REG_OPERAND(i))
#define R16(i) U16(REG_OPERAND(i))
#define R32(i) U32(REG_OPERAND(i))
#endif // V8_UNITTESTS_INTERPRETER_BYTECODE_UTILS_H_
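
For reference, these helpers reproduce the byte sequences that were previously written out by hand in bytecodes-unittest.cc: on a little-endian target, U16(1000) expands to 0xe8, 0x03 and U32(100000) to 0xa0, 0x86, 0x01, 0x00. A standalone check of the little-endian case (compilable outside the V8 tree):

#include <cassert>
#include <cstdint>

#define EXTRACT(x, n) static_cast<uint8_t>((x) >> (8 * n))
#define U16_LE(i) EXTRACT(i, 0), EXTRACT(i, 1)
#define U32_LE(i) EXTRACT(i, 0), EXTRACT(i, 1), EXTRACT(i, 2), EXTRACT(i, 3)

int main() {
  const uint8_t smi1000[] = {U16_LE(1000)};      // 1000   == 0x03e8
  const uint8_t smi100000[] = {U32_LE(100000)};  // 100000 == 0x000186a0
  assert(smi1000[0] == 0xe8 && smi1000[1] == 0x03);
  assert(smi100000[0] == 0xa0 && smi100000[1] == 0x86 &&
         smi100000[2] == 0x01 && smi100000[3] == 0x00);
  return 0;
}
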
......@@ -7,6 +7,7 @@
#include "src/v8.h"
#include "src/interpreter/bytecodes.h"
#include "test/unittests/interpreter/bytecode-utils.h"
#include "test/unittests/test-utils.h"
namespace v8 {
......@@ -76,9 +77,13 @@ TEST(OperandConversion, RegistersParametersNoOverlap) {
}
TEST(OperandScaling, ScalableAndNonScalable) {
for (OperandScale operand_scale = OperandScale::kSingle;
operand_scale <= OperandScale::kMaxValid;
operand_scale = Bytecodes::NextOperandScale(operand_scale)) {
const OperandScale kOperandScales[] = {
#define VALUE(Name, _) OperandScale::k##Name,
OPERAND_SCALE_LIST(VALUE)
#undef VALUE
};
for (OperandScale operand_scale : kOperandScales) {
int scale = static_cast<int>(operand_scale);
CHECK_EQ(Bytecodes::Size(Bytecode::kCallRuntime, operand_scale),
1 + 2 + 2 * scale);
......@@ -176,94 +181,39 @@ TEST(Bytecodes, DecodeBytecodeAndOperands) {
const char* output;
};
#define B(Name) static_cast<uint8_t>(Bytecode::k##Name)
#define REG_OPERAND(i) \
(InterpreterFrameConstants::kRegisterFileFromFp / kPointerSize - (i))
#define REG8(i) static_cast<uint8_t>(REG_OPERAND(i))
#if V8_TARGET_LITTLE_ENDIAN
#define REG16(i) \
static_cast<uint8_t>(REG_OPERAND(i)), \
static_cast<uint8_t>(REG_OPERAND(i) >> 8)
#elif V8_TARGET_BIG_ENDIAN
#define REG16(i) \
static_cast<uint8_t>(REG_OPERAND(i) >> 8), \
static_cast<uint8_t>(REG_OPERAND(i))
#else
#error "Unknown Architecture"
#endif
const BytecodesAndResult cases[] = {
#if V8_TARGET_LITTLE_ENDIAN
{{B(LdaSmi), 0x01}, 2, 0, " LdaSmi [1]"},
{{B(Wide), B(LdaSmi), 0xe8, 0x03}, 4, 0, " LdaSmi.Wide [1000]"},
{{B(ExtraWide), B(LdaSmi), 0xa0, 0x86, 0x01, 0x00},
6,
0,
"LdaSmi.ExtraWide [100000]"},
{{B(LdaSmi), 0xff}, 2, 0, " LdaSmi [-1]"},
{{B(Wide), B(LdaSmi), 0x18, 0xfc}, 4, 0, " LdaSmi.Wide [-1000]"},
{{B(ExtraWide), B(LdaSmi), 0x60, 0x79, 0xfe, 0xff},
6,
0,
"LdaSmi.ExtraWide [-100000]"},
{{B(Star), REG8(5)}, 2, 0, " Star r5"},
{{B(Wide), B(Star), REG16(136)}, 4, 0, " Star.Wide r136"},
{{B(Wide), B(Call), REG16(134), REG16(135), 0x02, 0x00, 0xb1, 0x00},
10,
0,
"Call.Wide r134, r135, #2, [177]"},
{{B(Ldar),
static_cast<uint8_t>(Register::FromParameterIndex(2, 3).ToOperand())},
2,
3,
" Ldar a1"},
{{B(Wide), B(CreateObjectLiteral), 0x01, 0x02, 0x03, 0x04, 0xa5},
7,
0,
"CreateObjectLiteral.Wide [513], [1027], #165"},
{{B(ExtraWide), B(JumpIfNull), 0x15, 0xcd, 0x5b, 0x07},
6,
0,
"JumpIfNull.ExtraWide [123456789]"},
#elif V8_TARGET_BIG_ENDIAN
{{B(LdaSmi), 0x01}, 2, 0, " LdaSmi [1]"},
{{B(Wide), B(LdaSmi), 0x03, 0xe8}, 4, 0, " LdaSmi.Wide [1000]"},
{{B(ExtraWide), B(LdaSmi), 0x00, 0x01, 0x86, 0xa0},
6,
0,
"LdaSmi.ExtraWide [100000]"},
{{B(LdaSmi), 0xff}, 2, 0, " LdaSmi [-1]"},
{{B(Wide), B(LdaSmi), 0xfc, 0x18}, 4, 0, " LdaSmi.Wide [-1000]"},
{{B(ExtraWide), B(LdaSmi), 0xff, 0xfe, 0x79, 0x60},
6,
0,
"LdaSmi.ExtraWide [-100000]"},
{{B(Star), REG8(5)}, 2, 0, " Star r5"},
{{B(Wide), B(Star), REG16(136)}, 4, 0, " Star.Wide r136"},
{{B(Wide), B(Call), REG16(134), REG16(135), 0x00, 0x02, 0x00, 0xb1},
10,
0,
"Call.Wide r134, r135, #2, [177]"},
{{B(Ldar),
static_cast<uint8_t>(Register::FromParameterIndex(2, 3).ToOperand())},
2,
3,
" Ldar a1"},
{{B(Wide), B(CreateObjectLiteral), 0x02, 0x01, 0x04, 0x03, 0xa5},
7,
0,
"CreateObjectLiteral.Wide [513], [1027], #165"},
{{B(ExtraWide), B(JumpIfNull), 0x07, 0x5b, 0xcd, 0x15},
6,
0,
"JumpIfNull.ExtraWide [123456789]"},
#else
#error "Unknown Architecture"
#endif
{{B(LdaSmi), U8(0x01)}, 2, 0, " LdaSmi [1]"},
{{B(Wide), B(LdaSmi), U16(1000)}, 4, 0, " LdaSmi.Wide [1000]"},
{{B(ExtraWide), B(LdaSmi), U32(100000)},
6,
0,
"LdaSmi.ExtraWide [100000]"},
{{B(LdaSmi), 0xff}, 2, 0, " LdaSmi [-1]"},
{{B(Wide), B(LdaSmi), 0x18, 0xfc}, 4, 0, " LdaSmi.Wide [-1000]"},
{{B(ExtraWide), B(LdaSmi), U32(-100000)},
6,
0,
"LdaSmi.ExtraWide [-100000]"},
{{B(Star), R8(5)}, 2, 0, " Star r5"},
{{B(Wide), B(Star), R16(136)}, 4, 0, " Star.Wide r136"},
{{B(Wide), B(Call), R16(134), R16(135), U16(0x02), U16(177)},
10,
0,
"Call.Wide r134, r135, #2, [177]"},
{{B(Ldar),
static_cast<uint8_t>(Register::FromParameterIndex(2, 3).ToOperand())},
2,
3,
" Ldar a1"},
{{B(Wide), B(CreateObjectLiteral), U16(513), U16(1027), U16(165)},
7,
0,
"CreateObjectLiteral.Wide [513], [1027], #165"},
{{B(ExtraWide), B(JumpIfNull), U32(123456789)},
6,
0,
"JumpIfNull.ExtraWide [123456789]"},
};
#undef B
#undef REG_OPERAND
#undef REG8
#undef REG16
for (size_t i = 0; i < arraysize(cases); ++i) {
// Generate reference string by prepending formatted bytes.
......@@ -304,13 +254,71 @@ TEST(Bytecodes, PrefixMappings) {
}
}
TEST(OperandScale, PrefixesScale) {
CHECK(Bytecodes::NextOperandScale(OperandScale::kSingle) ==
OperandScale::kDouble);
CHECK(Bytecodes::NextOperandScale(OperandScale::kDouble) ==
OperandScale::kQuadruple);
CHECK(Bytecodes::NextOperandScale(OperandScale::kQuadruple) ==
OperandScale::kInvalid);
TEST(Bytecodes, OperandScales) {
CHECK_EQ(Bytecodes::OperandSizesToScale(OperandSize::kByte),
OperandScale::kSingle);
CHECK_EQ(Bytecodes::OperandSizesToScale(OperandSize::kShort),
OperandScale::kDouble);
CHECK_EQ(Bytecodes::OperandSizesToScale(OperandSize::kQuad),
OperandScale::kQuadruple);
CHECK_EQ(
Bytecodes::OperandSizesToScale(OperandSize::kShort, OperandSize::kShort,
OperandSize::kShort, OperandSize::kShort),
OperandScale::kDouble);
CHECK_EQ(
Bytecodes::OperandSizesToScale(OperandSize::kQuad, OperandSize::kShort,
OperandSize::kShort, OperandSize::kShort),
OperandScale::kQuadruple);
CHECK_EQ(
Bytecodes::OperandSizesToScale(OperandSize::kShort, OperandSize::kQuad,
OperandSize::kShort, OperandSize::kShort),
OperandScale::kQuadruple);
CHECK_EQ(
Bytecodes::OperandSizesToScale(OperandSize::kShort, OperandSize::kShort,
OperandSize::kQuad, OperandSize::kShort),
OperandScale::kQuadruple);
CHECK_EQ(
Bytecodes::OperandSizesToScale(OperandSize::kShort, OperandSize::kShort,
OperandSize::kShort, OperandSize::kQuad),
OperandScale::kQuadruple);
}
TEST(Bytecodes, SizesForSignedOperands) {
CHECK(Bytecodes::SizeForSignedOperand(0) == OperandSize::kByte);
CHECK(Bytecodes::SizeForSignedOperand(kMaxInt8) == OperandSize::kByte);
CHECK(Bytecodes::SizeForSignedOperand(kMinInt8) == OperandSize::kByte);
CHECK(Bytecodes::SizeForSignedOperand(kMaxInt8 + 1) == OperandSize::kShort);
CHECK(Bytecodes::SizeForSignedOperand(kMinInt8 - 1) == OperandSize::kShort);
CHECK(Bytecodes::SizeForSignedOperand(kMaxInt16) == OperandSize::kShort);
CHECK(Bytecodes::SizeForSignedOperand(kMinInt16) == OperandSize::kShort);
CHECK(Bytecodes::SizeForSignedOperand(kMaxInt16 + 1) == OperandSize::kQuad);
CHECK(Bytecodes::SizeForSignedOperand(kMinInt16 - 1) == OperandSize::kQuad);
CHECK(Bytecodes::SizeForSignedOperand(kMaxInt) == OperandSize::kQuad);
CHECK(Bytecodes::SizeForSignedOperand(kMinInt) == OperandSize::kQuad);
}
TEST(Bytecodes, SizesForUnsignedOperands) {
// int overloads
CHECK(Bytecodes::SizeForUnsignedOperand(0) == OperandSize::kByte);
CHECK(Bytecodes::SizeForUnsignedOperand(kMaxUInt8) == OperandSize::kByte);
CHECK(Bytecodes::SizeForUnsignedOperand(kMaxUInt8 + 1) ==
OperandSize::kShort);
CHECK(Bytecodes::SizeForUnsignedOperand(kMaxUInt16) == OperandSize::kShort);
CHECK(Bytecodes::SizeForUnsignedOperand(kMaxUInt16 + 1) ==
OperandSize::kQuad);
// size_t overloads
CHECK(Bytecodes::SizeForUnsignedOperand(static_cast<size_t>(0)) ==
OperandSize::kByte);
CHECK(Bytecodes::SizeForUnsignedOperand(static_cast<size_t>(kMaxUInt8)) ==
OperandSize::kByte);
CHECK(Bytecodes::SizeForUnsignedOperand(static_cast<size_t>(kMaxUInt8 + 1)) ==
OperandSize::kShort);
CHECK(Bytecodes::SizeForUnsignedOperand(static_cast<size_t>(kMaxUInt16)) ==
OperandSize::kShort);
CHECK(Bytecodes::SizeForUnsignedOperand(
static_cast<size_t>(kMaxUInt16 + 1)) == OperandSize::kQuad);
CHECK(Bytecodes::SizeForUnsignedOperand(static_cast<size_t>(kMaxUInt32)) ==
OperandSize::kQuad);
}
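
The three groups of checks above (OperandScales, SizesForSignedOperands, SizesForUnsignedOperands) pin down the selection rules: the scale is driven by the widest operand, and an individual operand takes a byte, a short, or a quad depending on its range. A standalone sketch of the range half, with illustrative declarations rather than V8's:

#include <cstdint>
#include <limits>

enum class SizeSketch { kByte = 1, kShort = 2, kQuad = 4 };

static SizeSketch SizeForSigned(int32_t value) {
  if (value >= std::numeric_limits<int8_t>::min() &&
      value <= std::numeric_limits<int8_t>::max()) {
    return SizeSketch::kByte;
  }
  if (value >= std::numeric_limits<int16_t>::min() &&
      value <= std::numeric_limits<int16_t>::max()) {
    return SizeSketch::kShort;
  }
  return SizeSketch::kQuad;
}

static SizeSketch SizeForUnsigned(uint32_t value) {
  if (value <= std::numeric_limits<uint8_t>::max()) return SizeSketch::kByte;
  if (value <= std::numeric_limits<uint16_t>::max()) return SizeSketch::kShort;
  return SizeSketch::kQuad;
}
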
TEST(OperandScale, PrefixesRequired) {
......
......@@ -25,7 +25,7 @@ static int offsets[] = {0, 1, 2, 3, 4, 30, 31, 32,
TEST_F(SourcePositionTableTest, EncodeStatement) {
SourcePositionTableBuilder builder(isolate(), zone());
for (int i = 0; i < arraysize(offsets); i++) {
builder.AddStatementPosition(offsets[i], offsets[i]);
builder.AddPosition(offsets[i], offsets[i], true);
}
// To test correctness, we rely on the assertions in ToSourcePositionTable().
......@@ -36,8 +36,8 @@ TEST_F(SourcePositionTableTest, EncodeStatement) {
TEST_F(SourcePositionTableTest, EncodeStatementDuplicates) {
SourcePositionTableBuilder builder(isolate(), zone());
for (int i = 0; i < arraysize(offsets); i++) {
builder.AddStatementPosition(offsets[i], offsets[i]);
builder.AddStatementPosition(offsets[i], offsets[i] + 1);
builder.AddPosition(offsets[i], offsets[i], true);
builder.AddPosition(offsets[i], offsets[i] + 1, true);
}
// To test correctness, we rely on the assertions in ToSourcePositionTable().
......@@ -48,7 +48,7 @@ TEST_F(SourcePositionTableTest, EncodeStatementDuplicates) {
TEST_F(SourcePositionTableTest, EncodeExpression) {
SourcePositionTableBuilder builder(isolate(), zone());
for (int i = 0; i < arraysize(offsets); i++) {
builder.AddExpressionPosition(offsets[i], offsets[i]);
builder.AddPosition(offsets[i], offsets[i], false);
}
CHECK(!builder.ToSourcePositionTable().is_null());
}
......@@ -60,9 +60,9 @@ TEST_F(SourcePositionTableTest, EncodeAscending) {
for (int i = 0; i < arraysize(offsets); i++) {
accumulator += offsets[i];
if (i % 2) {
builder.AddStatementPosition(accumulator, accumulator);
builder.AddPosition(accumulator, accumulator, true);
} else {
builder.AddExpressionPosition(accumulator, accumulator);
builder.AddPosition(accumulator, accumulator, false);
}
}
......@@ -70,9 +70,9 @@ TEST_F(SourcePositionTableTest, EncodeAscending) {
for (int i = 0; i < arraysize(offsets); i++) {
accumulator -= offsets[i];
if (i % 2) {
builder.AddStatementPosition(accumulator, accumulator);
builder.AddPosition(accumulator, accumulator, true);
} else {
builder.AddExpressionPosition(accumulator, accumulator);
builder.AddPosition(accumulator, accumulator, false);
}
}
......
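
The two SourcePositionTableBuilder entry points are folded into a single AddPosition call; the boolean argument now carries what used to be encoded in the method name, as the updated tests show:

// Before                                       After
// builder.AddStatementPosition(offset, pos);   builder.AddPosition(offset, pos, true);
// builder.AddExpressionPosition(offset, pos);  builder.AddPosition(offset, pos, false);
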
......@@ -96,7 +96,10 @@
'interpreter/bytecodes-unittest.cc',
'interpreter/bytecode-array-builder-unittest.cc',
'interpreter/bytecode-array-iterator-unittest.cc',
'interpreter/bytecode-array-writer-unittest.cc',
'interpreter/bytecode-peephole-optimizer-unittest.cc',
'interpreter/bytecode-register-allocator-unittest.cc',
'interpreter/bytecode-pipeline-unittest.cc',
'interpreter/constant-array-builder-unittest.cc',
'interpreter/interpreter-assembler-unittest.cc',
'interpreter/interpreter-assembler-unittest.h',
......