Commit 02b7373a authored by oth, committed by Commit bot

[interpreter] Introduce bytecode generation pipeline.

This change introduces a pipeline for the final stages of
bytecode generation.

The peephole optimizer is made distinct from the BytecodeArrayBuilder.

A new BytecodeArrayWriter is responsible for writing bytecode. It
also keeps track of the maximum register seen and offers a potentially
smaller frame size.

R=rmcilroy@chromium.org
LOG=N
BUG=v8:4280

Review-Url: https://codereview.chromium.org/1947403002
Cr-Commit-Position: refs/heads/master@{#36220}
parent fa7460ad
......@@ -1235,8 +1235,14 @@ v8_source_set("v8_base") {
"src/interpreter/bytecode-array-builder.h",
"src/interpreter/bytecode-array-iterator.cc",
"src/interpreter/bytecode-array-iterator.h",
"src/interpreter/bytecode-array-writer.cc",
"src/interpreter/bytecode-array-writer.h",
"src/interpreter/bytecode-generator.cc",
"src/interpreter/bytecode-generator.h",
"src/interpreter/bytecode-peephole-optimizer.cc",
"src/interpreter/bytecode-peephole-optimizer.h",
"src/interpreter/bytecode-pipeline.cc",
"src/interpreter/bytecode-pipeline.h",
"src/interpreter/bytecode-register-allocator.cc",
"src/interpreter/bytecode-register-allocator.h",
"src/interpreter/bytecode-traits.h",
......
......@@ -1423,6 +1423,8 @@ void BytecodeGraphBuilder::VisitIllegal() {
jsgraph()->Constant(kIllegalBytecode));
}
void BytecodeGraphBuilder::VisitNop() {}
void BytecodeGraphBuilder::SwitchToMergeEnvironment(int current_offset) {
if (merge_environments_[current_offset] != nullptr) {
if (environment() != nullptr) {
......
......@@ -304,6 +304,7 @@ DEFINE_BOOL(ignition_eager, true, "eagerly compile and parse with ignition")
DEFINE_BOOL(ignition_generators, false,
"enable experimental ignition support for generators")
DEFINE_STRING(ignition_filter, "*", "filter for ignition interpreter")
DEFINE_BOOL(ignition_peephole, true, "use ignition peephole optimizer")
DEFINE_BOOL(print_bytecode, false,
"print bytecode generated by ignition interpreter")
DEFINE_BOOL(trace_ignition, false,
......
......@@ -3,117 +3,43 @@
// found in the LICENSE file.
#include "src/interpreter/bytecode-array-builder.h"
#include "src/compiler.h"
#include "src/interpreter/bytecode-array-writer.h"
#include "src/interpreter/bytecode-peephole-optimizer.h"
#include "src/interpreter/interpreter-intrinsics.h"
namespace v8 {
namespace internal {
namespace interpreter {
class BytecodeArrayBuilder::PreviousBytecodeHelper BASE_EMBEDDED {
public:
explicit PreviousBytecodeHelper(const BytecodeArrayBuilder& array_builder)
: array_builder_(array_builder),
previous_bytecode_start_(array_builder_.last_bytecode_start_) {
// This helper is expected to be instantiated only when the last bytecode is
// in the same basic block.
DCHECK(array_builder_.LastBytecodeInSameBlock());
bytecode_ = Bytecodes::FromByte(
array_builder_.bytecodes()->at(previous_bytecode_start_));
operand_scale_ = OperandScale::kSingle;
if (Bytecodes::IsPrefixScalingBytecode(bytecode_)) {
operand_scale_ = Bytecodes::PrefixBytecodeToOperandScale(bytecode_);
bytecode_ = Bytecodes::FromByte(
array_builder_.bytecodes()->at(previous_bytecode_start_ + 1));
}
}
// Returns the previous bytecode in the same basic block.
MUST_USE_RESULT Bytecode GetBytecode() const {
DCHECK_EQ(array_builder_.last_bytecode_start_, previous_bytecode_start_);
return bytecode_;
}
MUST_USE_RESULT Register GetRegisterOperand(int operand_index) const {
return Register::FromOperand(GetSignedOperand(operand_index));
}
MUST_USE_RESULT uint32_t GetIndexOperand(int operand_index) const {
return GetUnsignedOperand(operand_index);
}
Handle<Object> GetConstantForIndexOperand(int operand_index) const {
return array_builder_.constant_array_builder()->At(
GetIndexOperand(operand_index));
}
private:
// Returns the signed operand at operand_index for the previous
// bytecode in the same basic block.
MUST_USE_RESULT int32_t GetSignedOperand(int operand_index) const {
DCHECK_EQ(array_builder_.last_bytecode_start_, previous_bytecode_start_);
OperandType operand_type =
Bytecodes::GetOperandType(bytecode_, operand_index);
DCHECK(!Bytecodes::IsUnsignedOperandType(operand_type));
const uint8_t* operand_start = GetOperandStart(operand_index);
return Bytecodes::DecodeSignedOperand(operand_start, operand_type,
operand_scale_);
}
// Returns the unsigned operand at operand_index for the previous
// bytecode in the same basic block.
MUST_USE_RESULT uint32_t GetUnsignedOperand(int operand_index) const {
DCHECK_EQ(array_builder_.last_bytecode_start_, previous_bytecode_start_);
OperandType operand_type =
Bytecodes::GetOperandType(bytecode_, operand_index);
DCHECK(Bytecodes::IsUnsignedOperandType(operand_type));
const uint8_t* operand_start = GetOperandStart(operand_index);
return Bytecodes::DecodeUnsignedOperand(operand_start, operand_type,
operand_scale_);
}
const uint8_t* GetOperandStart(int operand_index) const {
size_t operand_offset =
previous_bytecode_start_ + prefix_offset() +
Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale_);
return &(*array_builder_.bytecodes())[0] + operand_offset;
}
int prefix_offset() const {
return Bytecodes::OperandScaleRequiresPrefixBytecode(operand_scale_) ? 1
: 0;
}
const BytecodeArrayBuilder& array_builder_;
OperandScale operand_scale_;
Bytecode bytecode_;
size_t previous_bytecode_start_;
DISALLOW_COPY_AND_ASSIGN(PreviousBytecodeHelper);
};
BytecodeArrayBuilder::BytecodeArrayBuilder(Isolate* isolate, Zone* zone,
int parameter_count,
int context_count, int locals_count,
FunctionLiteral* literal)
: isolate_(isolate),
zone_(zone),
bytecodes_(zone),
bytecode_generated_(false),
constant_array_builder_(isolate, zone),
handler_table_builder_(isolate, zone),
source_position_table_builder_(isolate, zone),
last_block_end_(0),
last_bytecode_start_(~0),
exit_seen_in_block_(false),
unbound_jumps_(0),
parameter_count_(parameter_count),
local_register_count_(locals_count),
context_register_count_(context_count),
temporary_allocator_(zone, fixed_register_count()) {
temporary_allocator_(zone, fixed_register_count()),
bytecode_array_writer_(zone, &source_position_table_builder_),
pipeline_(&bytecode_array_writer_) {
DCHECK_GE(parameter_count_, 0);
DCHECK_GE(context_register_count_, 0);
DCHECK_GE(local_register_count_, 0);
if (FLAG_ignition_peephole) {
pipeline_ = new (zone)
BytecodePeepholeOptimizer(&constant_array_builder_, pipeline_);
}
return_position_ =
literal ? std::max(literal->start_position(), literal->end_position() - 1)
: RelocInfo::kNoPosition;
......@@ -149,15 +75,22 @@ Handle<BytecodeArray> BytecodeArrayBuilder::ToBytecodeArray() {
DCHECK_EQ(bytecode_generated_, false);
DCHECK(exit_seen_in_block_);
int bytecode_size = static_cast<int>(bytecodes_.size());
int register_count = fixed_and_temporary_register_count();
int frame_size = register_count * kPointerSize;
pipeline()->FlushBasicBlock();
const ZoneVector<uint8_t>* bytecodes = bytecode_array_writer()->bytecodes();
int bytecode_size = static_cast<int>(bytecodes->size());
// All locals need a frame slot for the debugger, but may not be
// present in generated code.
int frame_size_for_locals = fixed_register_count() * kPointerSize;
int frame_size_used = bytecode_array_writer()->GetMaximumFrameSizeUsed();
int frame_size = std::max(frame_size_for_locals, frame_size_used);
Handle<FixedArray> constant_pool = constant_array_builder()->ToFixedArray();
Handle<FixedArray> handler_table = handler_table_builder()->ToHandlerTable();
Handle<ByteArray> source_position_table =
source_position_table_builder()->ToSourcePositionTable();
Handle<BytecodeArray> bytecode_array = isolate_->factory()->NewBytecodeArray(
bytecode_size, &bytecodes_.front(), frame_size, parameter_count(),
bytecode_size, &bytecodes->front(), frame_size, parameter_count(),
constant_pool);
bytecode_array->set_handler_table(*handler_table);
bytecode_array->set_source_position_table(*source_position_table);
......@@ -170,50 +103,10 @@ Handle<BytecodeArray> BytecodeArrayBuilder::ToBytecodeArray() {
return bytecode_array;
}
template <size_t N>
void BytecodeArrayBuilder::Output(Bytecode bytecode, uint32_t (&operands)[N],
OperandScale operand_scale) {
// Don't output dead code.
if (exit_seen_in_block_) return;
int operand_count = static_cast<int>(N);
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), operand_count);
last_bytecode_start_ = bytecodes()->size();
// Emit prefix bytecode for scale if required.
if (Bytecodes::OperandScaleRequiresPrefixBytecode(operand_scale)) {
bytecodes()->push_back(Bytecodes::ToByte(
Bytecodes::OperandScaleToPrefixBytecode(operand_scale)));
}
// Emit bytecode.
bytecodes()->push_back(Bytecodes::ToByte(bytecode));
// Emit operands.
for (int i = 0; i < operand_count; i++) {
DCHECK(OperandIsValid(bytecode, operand_scale, i, operands[i]));
switch (Bytecodes::GetOperandSize(bytecode, i, operand_scale)) {
case OperandSize::kNone:
UNREACHABLE();
break;
case OperandSize::kByte:
bytecodes()->push_back(static_cast<uint8_t>(operands[i]));
break;
case OperandSize::kShort: {
uint8_t operand_bytes[2];
WriteUnalignedUInt16(operand_bytes, operands[i]);
bytecodes()->insert(bytecodes()->end(), operand_bytes,
operand_bytes + 2);
break;
}
case OperandSize::kQuad: {
uint8_t operand_bytes[4];
WriteUnalignedUInt32(operand_bytes, operands[i]);
bytecodes()->insert(bytecodes()->end(), operand_bytes,
operand_bytes + 4);
break;
}
}
void BytecodeArrayBuilder::AttachSourceInfo(BytecodeNode* node) {
if (latest_source_info_.is_valid()) {
node->source_info().Update(latest_source_info_);
latest_source_info_.set_invalid();
}
}
......@@ -221,44 +114,68 @@ void BytecodeArrayBuilder::Output(Bytecode bytecode) {
// Don't output dead code.
if (exit_seen_in_block_) return;
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 0);
last_bytecode_start_ = bytecodes()->size();
bytecodes()->push_back(Bytecodes::ToByte(bytecode));
BytecodeNode node(bytecode);
AttachSourceInfo(&node);
pipeline()->Write(&node);
}
void BytecodeArrayBuilder::OutputScaled(Bytecode bytecode,
OperandScale operand_scale,
uint32_t operand0, uint32_t operand1,
uint32_t operand2, uint32_t operand3) {
uint32_t operands[] = {operand0, operand1, operand2, operand3};
Output(bytecode, operands, operand_scale);
// Don't output dead code.
if (exit_seen_in_block_) return;
DCHECK(OperandIsValid(bytecode, operand_scale, 0, operand0));
DCHECK(OperandIsValid(bytecode, operand_scale, 1, operand1));
DCHECK(OperandIsValid(bytecode, operand_scale, 2, operand2));
DCHECK(OperandIsValid(bytecode, operand_scale, 3, operand3));
BytecodeNode node(bytecode, operand0, operand1, operand2, operand3,
operand_scale);
AttachSourceInfo(&node);
pipeline()->Write(&node);
}
void BytecodeArrayBuilder::OutputScaled(Bytecode bytecode,
OperandScale operand_scale,
uint32_t operand0, uint32_t operand1,
uint32_t operand2) {
uint32_t operands[] = {operand0, operand1, operand2};
Output(bytecode, operands, operand_scale);
// Don't output dead code.
if (exit_seen_in_block_) return;
DCHECK(OperandIsValid(bytecode, operand_scale, 0, operand0));
DCHECK(OperandIsValid(bytecode, operand_scale, 1, operand1));
DCHECK(OperandIsValid(bytecode, operand_scale, 2, operand2));
BytecodeNode node(bytecode, operand0, operand1, operand2, operand_scale);
AttachSourceInfo(&node);
pipeline()->Write(&node);
}
void BytecodeArrayBuilder::OutputScaled(Bytecode bytecode,
OperandScale operand_scale,
uint32_t operand0, uint32_t operand1) {
uint32_t operands[] = {operand0, operand1};
Output(bytecode, operands, operand_scale);
// Don't output dead code.
if (exit_seen_in_block_) return;
DCHECK(OperandIsValid(bytecode, operand_scale, 0, operand0));
DCHECK(OperandIsValid(bytecode, operand_scale, 1, operand1));
BytecodeNode node(bytecode, operand0, operand1, operand_scale);
AttachSourceInfo(&node);
pipeline()->Write(&node);
}
void BytecodeArrayBuilder::OutputScaled(Bytecode bytecode,
OperandScale operand_scale,
uint32_t operand0) {
uint32_t operands[] = {operand0};
Output(bytecode, operands, operand_scale);
// Don't output dead code.
if (exit_seen_in_block_) return;
DCHECK(OperandIsValid(bytecode, operand_scale, 0, operand0));
BytecodeNode node(bytecode, operand0, operand_scale);
AttachSourceInfo(&node);
pipeline()->Write(&node);
}
BytecodeArrayBuilder& BytecodeArrayBuilder::BinaryOperation(Token::Value op,
Register reg) {
OperandScale operand_scale = OperandSizesToScale(reg.SizeOfOperand());
OperandScale operand_scale =
Bytecodes::OperandSizesToScale(reg.SizeOfOperand());
OutputScaled(BytecodeForBinaryOperation(op), operand_scale,
RegisterOperand(reg));
return *this;
......@@ -283,7 +200,8 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::TypeOf() {
BytecodeArrayBuilder& BytecodeArrayBuilder::CompareOperation(Token::Value op,
Register reg) {
OperandScale operand_scale = OperandSizesToScale(reg.SizeOfOperand());
OperandScale operand_scale =
Bytecodes::OperandSizesToScale(reg.SizeOfOperand());
OutputScaled(BytecodeForCompareOperation(op), operand_scale,
RegisterOperand(reg));
return *this;
......@@ -296,8 +214,8 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::LoadLiteral(
if (raw_smi == 0) {
Output(Bytecode::kLdaZero);
} else {
OperandSize operand_size = SizeForSignedOperand(raw_smi);
OperandScale operand_scale = OperandSizesToScale(operand_size);
OperandSize operand_size = Bytecodes::SizeForSignedOperand(raw_smi);
OperandScale operand_scale = Bytecodes::OperandSizesToScale(operand_size);
OutputScaled(Bytecode::kLdaSmi, operand_scale,
SignedOperand(raw_smi, operand_size));
}
......@@ -308,7 +226,7 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::LoadLiteral(
BytecodeArrayBuilder& BytecodeArrayBuilder::LoadLiteral(Handle<Object> object) {
size_t entry = GetConstantPoolEntry(object);
OperandScale operand_scale =
OperandSizesToScale(SizeForUnsignedOperand(entry));
Bytecodes::OperandSizesToScale(Bytecodes::SizeForUnsignedOperand(entry));
OutputScaled(Bytecode::kLdaConstant, operand_scale, UnsignedOperand(entry));
return *this;
}
......@@ -345,20 +263,18 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::LoadFalse() {
BytecodeArrayBuilder& BytecodeArrayBuilder::LoadAccumulatorWithRegister(
Register reg) {
if (!IsRegisterInAccumulator(reg)) {
OperandScale operand_scale = OperandSizesToScale(reg.SizeOfOperand());
OutputScaled(Bytecode::kLdar, operand_scale, RegisterOperand(reg));
}
OperandScale operand_scale =
Bytecodes::OperandSizesToScale(reg.SizeOfOperand());
OutputScaled(Bytecode::kLdar, operand_scale, RegisterOperand(reg));
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::StoreAccumulatorInRegister(
Register reg) {
if (!IsRegisterInAccumulator(reg)) {
OperandScale operand_scale = OperandSizesToScale(reg.SizeOfOperand());
OutputScaled(Bytecode::kStar, operand_scale, RegisterOperand(reg));
}
OperandScale operand_scale =
Bytecodes::OperandSizesToScale(reg.SizeOfOperand());
OutputScaled(Bytecode::kStar, operand_scale, RegisterOperand(reg));
return *this;
}
......@@ -367,7 +283,7 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::MoveRegister(Register from,
Register to) {
DCHECK(from != to);
OperandScale operand_scale =
OperandSizesToScale(from.SizeOfOperand(), to.SizeOfOperand());
Bytecodes::OperandSizesToScale(from.SizeOfOperand(), to.SizeOfOperand());
OutputScaled(Bytecode::kMov, operand_scale, RegisterOperand(from),
RegisterOperand(to));
return *this;
......@@ -379,9 +295,9 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::LoadGlobal(
// operand rather than having extra bytecodes.
Bytecode bytecode = BytecodeForLoadGlobal(typeof_mode);
size_t name_index = GetConstantPoolEntry(name);
OperandScale operand_scale =
OperandSizesToScale(SizeForUnsignedOperand(name_index),
SizeForUnsignedOperand(feedback_slot));
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
Bytecodes::SizeForUnsignedOperand(name_index),
Bytecodes::SizeForUnsignedOperand(feedback_slot));
OutputScaled(bytecode, operand_scale, UnsignedOperand(name_index),
UnsignedOperand(feedback_slot));
return *this;
......@@ -391,9 +307,9 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::StoreGlobal(
const Handle<String> name, int feedback_slot, LanguageMode language_mode) {
Bytecode bytecode = BytecodeForStoreGlobal(language_mode);
size_t name_index = GetConstantPoolEntry(name);
OperandScale operand_scale =
OperandSizesToScale(SizeForUnsignedOperand(name_index),
SizeForUnsignedOperand(feedback_slot));
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
Bytecodes::SizeForUnsignedOperand(name_index),
Bytecodes::SizeForUnsignedOperand(feedback_slot));
OutputScaled(bytecode, operand_scale, UnsignedOperand(name_index),
UnsignedOperand(feedback_slot));
return *this;
......@@ -402,8 +318,8 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::StoreGlobal(
BytecodeArrayBuilder& BytecodeArrayBuilder::LoadContextSlot(Register context,
int slot_index) {
OperandScale operand_scale = OperandSizesToScale(
context.SizeOfOperand(), SizeForUnsignedOperand(slot_index));
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
context.SizeOfOperand(), Bytecodes::SizeForUnsignedOperand(slot_index));
OutputScaled(Bytecode::kLdaContextSlot, operand_scale,
RegisterOperand(context), UnsignedOperand(slot_index));
return *this;
......@@ -412,8 +328,8 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::LoadContextSlot(Register context,
BytecodeArrayBuilder& BytecodeArrayBuilder::StoreContextSlot(Register context,
int slot_index) {
OperandScale operand_scale = OperandSizesToScale(
context.SizeOfOperand(), SizeForUnsignedOperand(slot_index));
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
context.SizeOfOperand(), Bytecodes::SizeForUnsignedOperand(slot_index));
OutputScaled(Bytecode::kStaContextSlot, operand_scale,
RegisterOperand(context), UnsignedOperand(slot_index));
return *this;
......@@ -425,8 +341,8 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::LoadLookupSlot(
? Bytecode::kLdaLookupSlotInsideTypeof
: Bytecode::kLdaLookupSlot;
size_t name_index = GetConstantPoolEntry(name);
OperandScale operand_scale =
OperandSizesToScale(SizeForUnsignedOperand(name_index));
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
Bytecodes::SizeForUnsignedOperand(name_index));
OutputScaled(bytecode, operand_scale, UnsignedOperand(name_index));
return *this;
}
......@@ -435,8 +351,8 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::StoreLookupSlot(
const Handle<String> name, LanguageMode language_mode) {
Bytecode bytecode = BytecodeForStoreLookupSlot(language_mode);
size_t name_index = GetConstantPoolEntry(name);
OperandScale operand_scale =
OperandSizesToScale(SizeForUnsignedOperand(name_index));
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
Bytecodes::SizeForUnsignedOperand(name_index));
OutputScaled(bytecode, operand_scale, UnsignedOperand(name_index));
return *this;
}
......@@ -444,9 +360,9 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::StoreLookupSlot(
BytecodeArrayBuilder& BytecodeArrayBuilder::LoadNamedProperty(
Register object, const Handle<Name> name, int feedback_slot) {
size_t name_index = GetConstantPoolEntry(name);
OperandScale operand_scale = OperandSizesToScale(
object.SizeOfOperand(), SizeForUnsignedOperand(name_index),
SizeForUnsignedOperand(feedback_slot));
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
object.SizeOfOperand(), Bytecodes::SizeForUnsignedOperand(name_index),
Bytecodes::SizeForUnsignedOperand(feedback_slot));
OutputScaled(Bytecode::kLoadIC, operand_scale, RegisterOperand(object),
UnsignedOperand(name_index), UnsignedOperand(feedback_slot));
return *this;
......@@ -454,8 +370,8 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::LoadNamedProperty(
BytecodeArrayBuilder& BytecodeArrayBuilder::LoadKeyedProperty(
Register object, int feedback_slot) {
OperandScale operand_scale = OperandSizesToScale(
object.SizeOfOperand(), SizeForUnsignedOperand(feedback_slot));
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
object.SizeOfOperand(), Bytecodes::SizeForUnsignedOperand(feedback_slot));
OutputScaled(Bytecode::kKeyedLoadIC, operand_scale, RegisterOperand(object),
UnsignedOperand(feedback_slot));
return *this;
......@@ -466,9 +382,9 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::StoreNamedProperty(
LanguageMode language_mode) {
Bytecode bytecode = BytecodeForStoreIC(language_mode);
size_t name_index = GetConstantPoolEntry(name);
OperandScale operand_scale = OperandSizesToScale(
object.SizeOfOperand(), SizeForUnsignedOperand(name_index),
SizeForUnsignedOperand(feedback_slot));
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
object.SizeOfOperand(), Bytecodes::SizeForUnsignedOperand(name_index),
Bytecodes::SizeForUnsignedOperand(feedback_slot));
OutputScaled(bytecode, operand_scale, RegisterOperand(object),
UnsignedOperand(name_index), UnsignedOperand(feedback_slot));
return *this;
......@@ -479,9 +395,9 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::StoreKeyedProperty(
Register object, Register key, int feedback_slot,
LanguageMode language_mode) {
Bytecode bytecode = BytecodeForKeyedStoreIC(language_mode);
OperandScale operand_scale =
OperandSizesToScale(object.SizeOfOperand(), key.SizeOfOperand(),
SizeForUnsignedOperand(feedback_slot));
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
object.SizeOfOperand(), key.SizeOfOperand(),
Bytecodes::SizeForUnsignedOperand(feedback_slot));
OutputScaled(bytecode, operand_scale, RegisterOperand(object),
RegisterOperand(key), UnsignedOperand(feedback_slot));
return *this;
......@@ -492,7 +408,7 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::CreateClosure(
Handle<SharedFunctionInfo> shared_info, PretenureFlag tenured) {
size_t entry = GetConstantPoolEntry(shared_info);
OperandScale operand_scale =
OperandSizesToScale(SizeForUnsignedOperand(entry));
Bytecodes::OperandSizesToScale(Bytecodes::SizeForUnsignedOperand(entry));
OutputScaled(Bytecode::kCreateClosure, operand_scale, UnsignedOperand(entry),
UnsignedOperand(static_cast<size_t>(tenured)));
return *this;
......@@ -513,9 +429,10 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::CreateArguments(
BytecodeArrayBuilder& BytecodeArrayBuilder::CreateRegExpLiteral(
Handle<String> pattern, int literal_index, int flags) {
size_t pattern_entry = GetConstantPoolEntry(pattern);
OperandScale operand_scale = OperandSizesToScale(
SizeForUnsignedOperand(pattern_entry),
SizeForUnsignedOperand(literal_index), SizeForUnsignedOperand(flags));
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
Bytecodes::SizeForUnsignedOperand(pattern_entry),
Bytecodes::SizeForUnsignedOperand(literal_index),
Bytecodes::SizeForUnsignedOperand(flags));
OutputScaled(Bytecode::kCreateRegExpLiteral, operand_scale,
UnsignedOperand(pattern_entry), UnsignedOperand(literal_index),
UnsignedOperand(flags));
......@@ -526,9 +443,10 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::CreateRegExpLiteral(
BytecodeArrayBuilder& BytecodeArrayBuilder::CreateArrayLiteral(
Handle<FixedArray> constant_elements, int literal_index, int flags) {
size_t constant_elements_entry = GetConstantPoolEntry(constant_elements);
OperandScale operand_scale = OperandSizesToScale(
SizeForUnsignedOperand(constant_elements_entry),
SizeForUnsignedOperand(literal_index), SizeForUnsignedOperand(flags));
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
Bytecodes::SizeForUnsignedOperand(constant_elements_entry),
Bytecodes::SizeForUnsignedOperand(literal_index),
Bytecodes::SizeForUnsignedOperand(flags));
OutputScaled(Bytecode::kCreateArrayLiteral, operand_scale,
UnsignedOperand(constant_elements_entry),
UnsignedOperand(literal_index), UnsignedOperand(flags));
......@@ -539,9 +457,10 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::CreateArrayLiteral(
BytecodeArrayBuilder& BytecodeArrayBuilder::CreateObjectLiteral(
Handle<FixedArray> constant_properties, int literal_index, int flags) {
size_t constant_properties_entry = GetConstantPoolEntry(constant_properties);
OperandScale operand_scale = OperandSizesToScale(
SizeForUnsignedOperand(constant_properties_entry),
SizeForUnsignedOperand(literal_index), SizeForUnsignedOperand(flags));
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
Bytecodes::SizeForUnsignedOperand(constant_properties_entry),
Bytecodes::SizeForUnsignedOperand(literal_index),
Bytecodes::SizeForUnsignedOperand(flags));
OutputScaled(Bytecode::kCreateObjectLiteral, operand_scale,
UnsignedOperand(constant_properties_entry),
UnsignedOperand(literal_index), UnsignedOperand(flags));
......@@ -550,46 +469,21 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::CreateObjectLiteral(
BytecodeArrayBuilder& BytecodeArrayBuilder::PushContext(Register context) {
OperandScale operand_scale = OperandSizesToScale(context.SizeOfOperand());
OperandScale operand_scale =
Bytecodes::OperandSizesToScale(context.SizeOfOperand());
OutputScaled(Bytecode::kPushContext, operand_scale, RegisterOperand(context));
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::PopContext(Register context) {
OperandScale operand_scale = OperandSizesToScale(context.SizeOfOperand());
OperandScale operand_scale =
Bytecodes::OperandSizesToScale(context.SizeOfOperand());
OutputScaled(Bytecode::kPopContext, operand_scale, RegisterOperand(context));
return *this;
}
bool BytecodeArrayBuilder::NeedToBooleanCast() {
if (!LastBytecodeInSameBlock()) {
return true;
}
PreviousBytecodeHelper previous_bytecode(*this);
switch (previous_bytecode.GetBytecode()) {
// If the previous bytecode puts a boolean in the accumulator return true.
case Bytecode::kLdaTrue:
case Bytecode::kLdaFalse:
case Bytecode::kLogicalNot:
case Bytecode::kTestEqual:
case Bytecode::kTestNotEqual:
case Bytecode::kTestEqualStrict:
case Bytecode::kTestLessThan:
case Bytecode::kTestLessThanOrEqual:
case Bytecode::kTestGreaterThan:
case Bytecode::kTestGreaterThanOrEqual:
case Bytecode::kTestInstanceOf:
case Bytecode::kTestIn:
case Bytecode::kForInDone:
return false;
default:
return true;
}
}
BytecodeArrayBuilder& BytecodeArrayBuilder::CastAccumulatorToJSObject() {
Output(Bytecode::kToObject);
return *this;
......@@ -597,41 +491,24 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::CastAccumulatorToJSObject() {
BytecodeArrayBuilder& BytecodeArrayBuilder::CastAccumulatorToName() {
if (LastBytecodeInSameBlock()) {
PreviousBytecodeHelper previous_bytecode(*this);
switch (previous_bytecode.GetBytecode()) {
case Bytecode::kToName:
case Bytecode::kTypeOf:
return *this;
case Bytecode::kLdaConstant: {
Handle<Object> object = previous_bytecode.GetConstantForIndexOperand(0);
if (object->IsName()) return *this;
break;
}
default:
break;
}
}
Output(Bytecode::kToName);
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::CastAccumulatorToNumber() {
// TODO(rmcilroy): consider omitting if the preceeding bytecode always returns
// a number.
Output(Bytecode::kToNumber);
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::Bind(BytecodeLabel* label) {
size_t current_offset = pipeline()->FlushForOffset();
if (label->is_forward_target()) {
// An earlier jump instruction refers to this label. Update its location.
PatchJump(bytecodes()->end(), bytecodes()->begin() + label->offset());
PatchJump(current_offset, label->offset());
// Now treat as if the label will only be back referred to.
}
label->bind_to(bytecodes()->size());
label->bind_to(current_offset);
LeaveBasicBlock();
return *this;
}
......@@ -641,10 +518,11 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::Bind(const BytecodeLabel& target,
BytecodeLabel* label) {
DCHECK(!label->is_bound());
DCHECK(target.is_bound());
// There is no need to flush the pipeline here, it will have been
// flushed when |target| was bound.
if (label->is_forward_target()) {
// An earlier jump instruction refers to this label. Update its location.
PatchJump(bytecodes()->begin() + target.offset(),
bytecodes()->begin() + label->offset());
PatchJump(target.offset(), label->offset());
// Now treat as if the label will only be back referred to.
}
label->bind_to(target.offset());
......@@ -679,90 +557,74 @@ Bytecode BytecodeArrayBuilder::GetJumpWithConstantOperand(
}
}
// static
Bytecode BytecodeArrayBuilder::GetJumpWithToBoolean(Bytecode jump_bytecode) {
switch (jump_bytecode) {
case Bytecode::kJump:
case Bytecode::kJumpIfNull:
case Bytecode::kJumpIfUndefined:
case Bytecode::kJumpIfNotHole:
return jump_bytecode;
case Bytecode::kJumpIfTrue:
return Bytecode::kJumpIfToBooleanTrue;
case Bytecode::kJumpIfFalse:
return Bytecode::kJumpIfToBooleanFalse;
default:
UNREACHABLE();
}
return Bytecode::kIllegal;
}
void BytecodeArrayBuilder::PatchIndirectJumpWith8BitOperand(
const ZoneVector<uint8_t>::iterator& jump_location, int delta) {
Bytecode jump_bytecode = Bytecodes::FromByte(*jump_location);
void BytecodeArrayBuilder::PatchJumpWith8BitOperand(
ZoneVector<uint8_t>* bytecodes, size_t jump_location, int delta) {
Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes->at(jump_location));
DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
ZoneVector<uint8_t>::iterator operand_location = jump_location + 1;
DCHECK_EQ(*operand_location, 0);
if (SizeForSignedOperand(delta) == OperandSize::kByte) {
size_t operand_location = jump_location + 1;
DCHECK_EQ(bytecodes->at(operand_location), 0);
if (Bytecodes::SizeForSignedOperand(delta) == OperandSize::kByte) {
// The jump fits within the range of an Imm operand, so cancel
// the reservation and jump directly.
constant_array_builder()->DiscardReservedEntry(OperandSize::kByte);
*operand_location = static_cast<uint8_t>(delta);
bytecodes->at(operand_location) = static_cast<uint8_t>(delta);
} else {
// The jump does not fit within the range of an Imm operand, so
// commit reservation putting the offset into the constant pool,
// and update the jump instruction and operand.
size_t entry = constant_array_builder()->CommitReservedEntry(
OperandSize::kByte, handle(Smi::FromInt(delta), isolate()));
DCHECK(SizeForUnsignedOperand(entry) == OperandSize::kByte);
DCHECK(Bytecodes::SizeForUnsignedOperand(entry) == OperandSize::kByte);
jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
*jump_location = Bytecodes::ToByte(jump_bytecode);
*operand_location = static_cast<uint8_t>(entry);
bytecodes->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
bytecodes->at(operand_location) = static_cast<uint8_t>(entry);
}
}
void BytecodeArrayBuilder::PatchIndirectJumpWith16BitOperand(
const ZoneVector<uint8_t>::iterator& jump_location, int delta) {
Bytecode jump_bytecode = Bytecodes::FromByte(*jump_location);
void BytecodeArrayBuilder::PatchJumpWith16BitOperand(
ZoneVector<uint8_t>* bytecodes, size_t jump_location, int delta) {
Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes->at(jump_location));
DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
ZoneVector<uint8_t>::iterator operand_location = jump_location + 1;
size_t operand_location = jump_location + 1;
uint8_t operand_bytes[2];
if (SizeForSignedOperand(delta) <= OperandSize::kShort) {
if (Bytecodes::SizeForSignedOperand(delta) <= OperandSize::kShort) {
constant_array_builder()->DiscardReservedEntry(OperandSize::kShort);
WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(delta));
} else {
jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
*jump_location = Bytecodes::ToByte(jump_bytecode);
bytecodes->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
size_t entry = constant_array_builder()->CommitReservedEntry(
OperandSize::kShort, handle(Smi::FromInt(delta), isolate()));
WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(entry));
}
DCHECK(*operand_location == 0 && *(operand_location + 1) == 0);
*operand_location++ = operand_bytes[0];
*operand_location = operand_bytes[1];
DCHECK(bytecodes->at(operand_location) == 0 &&
bytecodes->at(operand_location + 1) == 0);
bytecodes->at(operand_location++) = operand_bytes[0];
bytecodes->at(operand_location) = operand_bytes[1];
}
void BytecodeArrayBuilder::PatchIndirectJumpWith32BitOperand(
const ZoneVector<uint8_t>::iterator& jump_location, int delta) {
DCHECK(Bytecodes::IsJumpImmediate(Bytecodes::FromByte(*jump_location)));
void BytecodeArrayBuilder::PatchJumpWith32BitOperand(
ZoneVector<uint8_t>* bytecodes, size_t jump_location, int delta) {
DCHECK(Bytecodes::IsJumpImmediate(
Bytecodes::FromByte(bytecodes->at(jump_location))));
constant_array_builder()->DiscardReservedEntry(OperandSize::kQuad);
ZoneVector<uint8_t>::iterator operand_location = jump_location + 1;
uint8_t operand_bytes[4];
WriteUnalignedUInt32(operand_bytes, static_cast<uint32_t>(delta));
DCHECK(*operand_location == 0 && *(operand_location + 1) == 0 &&
*(operand_location + 2) == 0 && *(operand_location + 3) == 0);
*operand_location++ = operand_bytes[0];
*operand_location++ = operand_bytes[1];
*operand_location++ = operand_bytes[2];
*operand_location = operand_bytes[3];
}
void BytecodeArrayBuilder::PatchJump(
const ZoneVector<uint8_t>::iterator& jump_target,
const ZoneVector<uint8_t>::iterator& jump_location) {
size_t operand_location = jump_location + 1;
DCHECK(bytecodes->at(operand_location) == 0 &&
bytecodes->at(operand_location + 1) == 0 &&
bytecodes->at(operand_location + 2) == 0 &&
bytecodes->at(operand_location + 3) == 0);
bytecodes->at(operand_location++) = operand_bytes[0];
bytecodes->at(operand_location++) = operand_bytes[1];
bytecodes->at(operand_location++) = operand_bytes[2];
bytecodes->at(operand_location) = operand_bytes[3];
}
void BytecodeArrayBuilder::PatchJump(size_t jump_target, size_t jump_location) {
ZoneVector<uint8_t>* bytecodes = bytecode_array_writer()->bytecodes();
Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes->at(jump_location));
int delta = static_cast<int>(jump_target - jump_location);
Bytecode jump_bytecode = Bytecodes::FromByte(*jump_location);
int prefix_offset = 0;
OperandScale operand_scale = OperandScale::kSingle;
if (Bytecodes::IsPrefixScalingBytecode(jump_bytecode)) {
......@@ -771,19 +633,22 @@ void BytecodeArrayBuilder::PatchJump(
delta -= 1;
prefix_offset = 1;
operand_scale = Bytecodes::PrefixBytecodeToOperandScale(jump_bytecode);
jump_bytecode = Bytecodes::FromByte(*(jump_location + prefix_offset));
jump_bytecode =
Bytecodes::FromByte(bytecodes->at(jump_location + prefix_offset));
}
DCHECK(Bytecodes::IsJump(jump_bytecode));
switch (operand_scale) {
case OperandScale::kSingle:
PatchIndirectJumpWith8BitOperand(jump_location, delta);
PatchJumpWith8BitOperand(bytecodes, jump_location, delta);
break;
case OperandScale::kDouble:
PatchIndirectJumpWith16BitOperand(jump_location + prefix_offset, delta);
PatchJumpWith16BitOperand(bytecodes, jump_location + prefix_offset,
delta);
break;
case OperandScale::kQuadruple:
PatchIndirectJumpWith32BitOperand(jump_location + prefix_offset, delta);
PatchJumpWith32BitOperand(bytecodes, jump_location + prefix_offset,
delta);
break;
default:
UNREACHABLE();
......@@ -797,25 +662,20 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::OutputJump(Bytecode jump_bytecode,
// Don't emit dead code.
if (exit_seen_in_block_) return *this;
// Check if the value in accumulator is boolean, if not choose an
// appropriate JumpIfToBoolean bytecode.
if (NeedToBooleanCast()) {
jump_bytecode = GetJumpWithToBoolean(jump_bytecode);
}
if (label->is_bound()) {
// Label has been bound already so this is a backwards jump.
CHECK_GE(bytecodes()->size(), label->offset());
CHECK_LE(bytecodes()->size(), static_cast<size_t>(kMaxInt));
size_t abs_delta = bytecodes()->size() - label->offset();
size_t current_offset = pipeline()->FlushForOffset();
CHECK_GE(current_offset, label->offset());
CHECK_LE(current_offset, static_cast<size_t>(kMaxInt));
size_t abs_delta = current_offset - label->offset();
int delta = -static_cast<int>(abs_delta);
OperandSize operand_size = SizeForSignedOperand(delta);
OperandSize operand_size = Bytecodes::SizeForSignedOperand(delta);
if (operand_size > OperandSize::kByte) {
// Adjust for scaling byte prefix for wide jump offset.
DCHECK_LE(delta, 0);
delta -= 1;
}
OutputScaled(jump_bytecode, OperandSizesToScale(operand_size),
OutputScaled(jump_bytecode, Bytecodes::OperandSizesToScale(operand_size),
SignedOperand(delta, operand_size));
} else {
// The label has not yet been bound so this is a forward reference
......@@ -824,37 +684,47 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::OutputJump(Bytecode jump_bytecode,
// when the label is bound. The reservation means the maximum size
// of the operand for the constant is known and the jump can
// be emitted into the bytecode stream with space for the operand.
label->set_referrer(bytecodes()->size());
unbound_jumps_++;
OperandSize reserved_operand_size =
constant_array_builder()->CreateReservedEntry();
OutputScaled(jump_bytecode, OperandSizesToScale(reserved_operand_size), 0);
OutputScaled(jump_bytecode,
Bytecodes::OperandSizesToScale(reserved_operand_size), 0);
// Calculate the label position by flushing for offset after emitting the
// jump bytecode.
size_t offset = pipeline()->FlushForOffset();
OperandScale operand_scale =
Bytecodes::OperandSizesToScale(reserved_operand_size);
offset -= Bytecodes::Size(jump_bytecode, operand_scale);
if (Bytecodes::OperandScaleRequiresPrefixBytecode(operand_scale)) {
offset -= 1;
}
label->set_referrer(offset);
}
LeaveBasicBlock();
return *this;
}
// Emits an unconditional jump to |label|.
BytecodeArrayBuilder& BytecodeArrayBuilder::Jump(BytecodeLabel* label) {
  return OutputJump(Bytecode::kJump, label);
}
BytecodeArrayBuilder& BytecodeArrayBuilder::JumpIfTrue(BytecodeLabel* label) {
return OutputJump(Bytecode::kJumpIfTrue, label);
// The peephole optimizer attempts to simplify JumpIfToBooleanTrue
// to JumpIfTrue.
return OutputJump(Bytecode::kJumpIfToBooleanTrue, label);
}
BytecodeArrayBuilder& BytecodeArrayBuilder::JumpIfFalse(BytecodeLabel* label) {
return OutputJump(Bytecode::kJumpIfFalse, label);
// The peephole optimizer attempts to simplify JumpIfToBooleanFalse
// to JumpIfFalse.
return OutputJump(Bytecode::kJumpIfToBooleanFalse, label);
}
// Emits a JumpIfNull bytecode targeting |label|.
BytecodeArrayBuilder& BytecodeArrayBuilder::JumpIfNull(BytecodeLabel* label) {
  return OutputJump(Bytecode::kJumpIfNull, label);
}
BytecodeArrayBuilder& BytecodeArrayBuilder::JumpIfUndefined(
BytecodeLabel* label) {
return OutputJump(Bytecode::kJumpIfUndefined, label);
......@@ -864,8 +734,7 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::StackCheck(int position) {
if (position != RelocInfo::kNoPosition) {
// We need to attach a non-breakable source position to a stack check,
// so we simply add it as expression position.
source_position_table_builder_.AddExpressionPosition(bytecodes_.size(),
position);
latest_source_info_.Update({position, false});
}
Output(Bytecode::kStackCheck);
return *this;
......@@ -910,7 +779,7 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::Debugger() {
BytecodeArrayBuilder& BytecodeArrayBuilder::ForInPrepare(
Register cache_info_triple) {
OperandScale operand_scale =
OperandSizesToScale(cache_info_triple.SizeOfOperand());
Bytecodes::OperandSizesToScale(cache_info_triple.SizeOfOperand());
OutputScaled(Bytecode::kForInPrepare, operand_scale,
RegisterOperand(cache_info_triple));
return *this;
......@@ -918,8 +787,8 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::ForInPrepare(
BytecodeArrayBuilder& BytecodeArrayBuilder::ForInDone(Register index,
Register cache_length) {
OperandScale operand_scale =
OperandSizesToScale(index.SizeOfOperand(), cache_length.SizeOfOperand());
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
index.SizeOfOperand(), cache_length.SizeOfOperand());
OutputScaled(Bytecode::kForInDone, operand_scale, RegisterOperand(index),
RegisterOperand(cache_length));
return *this;
......@@ -928,10 +797,10 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::ForInDone(Register index,
BytecodeArrayBuilder& BytecodeArrayBuilder::ForInNext(
Register receiver, Register index, Register cache_type_array_pair,
int feedback_slot) {
OperandScale operand_scale =
OperandSizesToScale(receiver.SizeOfOperand(), index.SizeOfOperand(),
cache_type_array_pair.SizeOfOperand(),
SizeForUnsignedOperand(feedback_slot));
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
receiver.SizeOfOperand(), index.SizeOfOperand(),
cache_type_array_pair.SizeOfOperand(),
Bytecodes::SizeForUnsignedOperand(feedback_slot));
OutputScaled(Bytecode::kForInNext, operand_scale, RegisterOperand(receiver),
RegisterOperand(index), RegisterOperand(cache_type_array_pair),
UnsignedOperand(feedback_slot));
......@@ -940,7 +809,8 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::ForInNext(
BytecodeArrayBuilder& BytecodeArrayBuilder::ForInStep(Register index) {
OperandScale operand_scale = OperandSizesToScale(index.SizeOfOperand());
OperandScale operand_scale =
Bytecodes::OperandSizesToScale(index.SizeOfOperand());
OutputScaled(Bytecode::kForInStep, operand_scale, RegisterOperand(index));
return *this;
}
......@@ -948,7 +818,8 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::ForInStep(Register index) {
BytecodeArrayBuilder& BytecodeArrayBuilder::SuspendGenerator(
Register generator) {
OperandScale operand_scale = OperandSizesToScale(generator.SizeOfOperand());
OperandScale operand_scale =
Bytecodes::OperandSizesToScale(generator.SizeOfOperand());
OutputScaled(Bytecode::kSuspendGenerator, operand_scale,
RegisterOperand(generator));
return *this;
......@@ -957,7 +828,8 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::SuspendGenerator(
BytecodeArrayBuilder& BytecodeArrayBuilder::ResumeGenerator(
Register generator) {
OperandScale operand_scale = OperandSizesToScale(generator.SizeOfOperand());
OperandScale operand_scale =
Bytecodes::OperandSizesToScale(generator.SizeOfOperand());
OutputScaled(Bytecode::kResumeGenerator, operand_scale,
RegisterOperand(generator));
return *this;
......@@ -966,7 +838,8 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::ResumeGenerator(
BytecodeArrayBuilder& BytecodeArrayBuilder::MarkHandler(int handler_id,
bool will_catch) {
handler_table_builder()->SetHandlerTarget(handler_id, bytecodes()->size());
size_t offset = pipeline()->FlushForOffset();
handler_table_builder()->SetHandlerTarget(handler_id, offset);
handler_table_builder()->SetPrediction(handler_id, will_catch);
return *this;
}
......@@ -974,21 +847,23 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::MarkHandler(int handler_id,
BytecodeArrayBuilder& BytecodeArrayBuilder::MarkTryBegin(int handler_id,
Register context) {
handler_table_builder()->SetTryRegionStart(handler_id, bytecodes()->size());
size_t offset = pipeline()->FlushForOffset();
handler_table_builder()->SetTryRegionStart(handler_id, offset);
handler_table_builder()->SetContextRegister(handler_id, context);
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::MarkTryEnd(int handler_id) {
handler_table_builder()->SetTryRegionEnd(handler_id, bytecodes()->size());
size_t offset = pipeline()->FlushForOffset();
handler_table_builder()->SetTryRegionEnd(handler_id, offset);
return *this;
}
void BytecodeArrayBuilder::LeaveBasicBlock() {
last_block_end_ = bytecodes()->size();
exit_seen_in_block_ = false;
pipeline()->FlushBasicBlock();
}
void BytecodeArrayBuilder::EnsureReturn() {
......@@ -1005,10 +880,10 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::Call(Register callable,
int feedback_slot,
TailCallMode tail_call_mode) {
Bytecode bytecode = BytecodeForCall(tail_call_mode);
OperandScale operand_scale = OperandSizesToScale(
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
callable.SizeOfOperand(), receiver_args.SizeOfOperand(),
SizeForUnsignedOperand(receiver_args_count),
SizeForUnsignedOperand(feedback_slot));
Bytecodes::SizeForUnsignedOperand(receiver_args_count),
Bytecodes::SizeForUnsignedOperand(feedback_slot));
OutputScaled(bytecode, operand_scale, RegisterOperand(callable),
RegisterOperand(receiver_args),
UnsignedOperand(receiver_args_count),
......@@ -1023,9 +898,9 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::New(Register constructor,
DCHECK_EQ(0u, arg_count);
first_arg = Register(0);
}
OperandScale operand_scale = OperandSizesToScale(
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
constructor.SizeOfOperand(), first_arg.SizeOfOperand(),
SizeForUnsignedOperand(arg_count));
Bytecodes::SizeForUnsignedOperand(arg_count));
OutputScaled(Bytecode::kNew, operand_scale, RegisterOperand(constructor),
RegisterOperand(first_arg), UnsignedOperand(arg_count));
return *this;
......@@ -1035,7 +910,7 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::New(Register constructor,
BytecodeArrayBuilder& BytecodeArrayBuilder::CallRuntime(
Runtime::FunctionId function_id, Register first_arg, size_t arg_count) {
DCHECK_EQ(1, Runtime::FunctionForId(function_id)->result_size);
DCHECK(SizeForUnsignedOperand(function_id) <= OperandSize::kShort);
DCHECK(Bytecodes::SizeForUnsignedOperand(function_id) <= OperandSize::kShort);
if (!first_arg.is_valid()) {
DCHECK_EQ(0u, arg_count);
first_arg = Register(0);
......@@ -1043,8 +918,8 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::CallRuntime(
Bytecode bytecode = IntrinsicsHelper::IsSupported(function_id)
? Bytecode::kInvokeIntrinsic
: Bytecode::kCallRuntime;
OperandScale operand_scale = OperandSizesToScale(
first_arg.SizeOfOperand(), SizeForUnsignedOperand(arg_count));
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
first_arg.SizeOfOperand(), Bytecodes::SizeForUnsignedOperand(arg_count));
OutputScaled(bytecode, operand_scale, static_cast<uint16_t>(function_id),
RegisterOperand(first_arg), UnsignedOperand(arg_count));
return *this;
......@@ -1055,13 +930,13 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::CallRuntimeForPair(
Runtime::FunctionId function_id, Register first_arg, size_t arg_count,
Register first_return) {
DCHECK_EQ(2, Runtime::FunctionForId(function_id)->result_size);
DCHECK(SizeForUnsignedOperand(function_id) <= OperandSize::kShort);
DCHECK(Bytecodes::SizeForUnsignedOperand(function_id) <= OperandSize::kShort);
if (!first_arg.is_valid()) {
DCHECK_EQ(0u, arg_count);
first_arg = Register(0);
}
OperandScale operand_scale = OperandSizesToScale(
first_arg.SizeOfOperand(), SizeForUnsignedOperand(arg_count),
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
first_arg.SizeOfOperand(), Bytecodes::SizeForUnsignedOperand(arg_count),
first_return.SizeOfOperand());
OutputScaled(Bytecode::kCallRuntimeForPair, operand_scale,
static_cast<uint16_t>(function_id), RegisterOperand(first_arg),
......@@ -1071,9 +946,10 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::CallRuntimeForPair(
BytecodeArrayBuilder& BytecodeArrayBuilder::CallJSRuntime(
int context_index, Register receiver_args, size_t receiver_args_count) {
OperandScale operand_scale = OperandSizesToScale(
SizeForUnsignedOperand(context_index), receiver_args.SizeOfOperand(),
SizeForUnsignedOperand(receiver_args_count));
OperandScale operand_scale = Bytecodes::OperandSizesToScale(
Bytecodes::SizeForUnsignedOperand(context_index),
receiver_args.SizeOfOperand(),
Bytecodes::SizeForUnsignedOperand(receiver_args_count));
OutputScaled(Bytecode::kCallJSRuntime, operand_scale,
UnsignedOperand(context_index), RegisterOperand(receiver_args),
UnsignedOperand(receiver_args_count));
......@@ -1083,7 +959,8 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::CallJSRuntime(
BytecodeArrayBuilder& BytecodeArrayBuilder::Delete(Register object,
LanguageMode language_mode) {
OperandScale operand_scale = OperandSizesToScale(object.SizeOfOperand());
OperandScale operand_scale =
Bytecodes::OperandSizesToScale(object.SizeOfOperand());
OutputScaled(BytecodeForDelete(language_mode), operand_scale,
RegisterOperand(object));
return *this;
......@@ -1096,29 +973,25 @@ size_t BytecodeArrayBuilder::GetConstantPoolEntry(Handle<Object> object) {
void BytecodeArrayBuilder::SetReturnPosition() {
if (return_position_ == RelocInfo::kNoPosition) return;
if (exit_seen_in_block_) return;
source_position_table_builder_.AddStatementPosition(bytecodes_.size(),
return_position_);
latest_source_info_.Update({return_position_, true});
}
void BytecodeArrayBuilder::SetStatementPosition(Statement* stmt) {
if (stmt->position() == RelocInfo::kNoPosition) return;
if (exit_seen_in_block_) return;
source_position_table_builder_.AddStatementPosition(bytecodes_.size(),
stmt->position());
latest_source_info_.Update({stmt->position(), true});
}
void BytecodeArrayBuilder::SetExpressionPosition(Expression* expr) {
if (expr->position() == RelocInfo::kNoPosition) return;
if (exit_seen_in_block_) return;
source_position_table_builder_.AddExpressionPosition(bytecodes_.size(),
expr->position());
latest_source_info_.Update({expr->position(), false});
}
void BytecodeArrayBuilder::SetExpressionAsStatementPosition(Expression* expr) {
if (expr->position() == RelocInfo::kNoPosition) return;
if (exit_seen_in_block_) return;
source_position_table_builder_.AddStatementPosition(bytecodes_.size(),
expr->position());
latest_source_info_.Update({expr->position(), true});
}
bool BytecodeArrayBuilder::TemporaryRegisterIsLive(Register reg) const {
......@@ -1150,7 +1023,7 @@ bool BytecodeArrayBuilder::OperandIsValid(Bytecode bytecode,
case OperandType::kRuntimeId:
case OperandType::kImm: {
size_t unsigned_value = static_cast<size_t>(operand_value);
return SizeForUnsignedOperand(unsigned_value) <= operand_size;
return Bytecodes::SizeForUnsignedOperand(unsigned_value) <= operand_size;
}
case OperandType::kMaybeReg:
if (RegisterFromOperand(operand_value) == Register(0)) {
......@@ -1207,25 +1080,6 @@ bool BytecodeArrayBuilder::RegisterIsValid(Register reg,
}
}
bool BytecodeArrayBuilder::LastBytecodeInSameBlock() const {
return last_bytecode_start_ < bytecodes()->size() &&
last_bytecode_start_ >= last_block_end_;
}
bool BytecodeArrayBuilder::IsRegisterInAccumulator(Register reg) {
if (LastBytecodeInSameBlock()) {
PreviousBytecodeHelper previous_bytecode(*this);
Bytecode bytecode = previous_bytecode.GetBytecode();
if (bytecode == Bytecode::kLdar || bytecode == Bytecode::kStar) {
return previous_bytecode.GetRegisterOperand(0) == reg;
}
}
return false;
}
// static
Bytecode BytecodeArrayBuilder::BytecodeForBinaryOperation(Token::Value op) {
switch (op) {
......@@ -1407,65 +1261,6 @@ Bytecode BytecodeArrayBuilder::BytecodeForCall(TailCallMode tail_call_mode) {
return Bytecode::kIllegal;
}
// static
OperandSize BytecodeArrayBuilder::SizeForSignedOperand(int value) {
if (kMinInt8 <= value && value <= kMaxInt8) {
return OperandSize::kByte;
} else if (kMinInt16 <= value && value <= kMaxInt16) {
return OperandSize::kShort;
} else {
return OperandSize::kQuad;
}
}
// static
OperandSize BytecodeArrayBuilder::SizeForUnsignedOperand(int value) {
DCHECK_GE(value, 0);
if (value <= kMaxUInt8) {
return OperandSize::kByte;
} else if (value <= kMaxUInt16) {
return OperandSize::kShort;
} else {
return OperandSize::kQuad;
}
}
OperandSize BytecodeArrayBuilder::SizeForUnsignedOperand(size_t value) {
if (value <= static_cast<size_t>(kMaxUInt8)) {
return OperandSize::kByte;
} else if (value <= static_cast<size_t>(kMaxUInt16)) {
return OperandSize::kShort;
} else if (value <= kMaxUInt32) {
return OperandSize::kQuad;
} else {
UNREACHABLE();
return OperandSize::kQuad;
}
}
OperandScale BytecodeArrayBuilder::OperandSizesToScale(OperandSize size0,
OperandSize size1,
OperandSize size2,
OperandSize size3) {
OperandSize upper = std::max(size0, size1);
OperandSize lower = std::max(size2, size3);
OperandSize result = std::max(upper, lower);
// Operand sizes have been scaled before calling this function.
// Currently all scalable operands are byte sized at
// OperandScale::kSingle.
STATIC_ASSERT(static_cast<int>(OperandSize::kByte) ==
static_cast<int>(OperandScale::kSingle) &&
static_cast<int>(OperandSize::kShort) ==
static_cast<int>(OperandScale::kDouble) &&
static_cast<int>(OperandSize::kQuad) ==
static_cast<int>(OperandScale::kQuadruple));
OperandScale operand_scale = static_cast<OperandScale>(result);
DCHECK(operand_scale == OperandScale::kSingle ||
operand_scale == OperandScale::kDouble ||
operand_scale == OperandScale::kQuadruple);
return operand_scale;
}
uint32_t BytecodeArrayBuilder::RegisterOperand(Register reg) {
return static_cast<uint32_t>(reg.ToOperand());
}
......
......@@ -6,6 +6,7 @@
#define V8_INTERPRETER_BYTECODE_ARRAY_BUILDER_H_
#include "src/ast/ast.h"
#include "src/interpreter/bytecode-array-writer.h"
#include "src/interpreter/bytecode-register-allocator.h"
#include "src/interpreter/bytecodes.h"
#include "src/interpreter/constant-array-builder.h"
......@@ -21,6 +22,8 @@ class Isolate;
namespace interpreter {
class BytecodeLabel;
class BytecodeNode;
class BytecodePipelineStage;
class Register;
class BytecodeArrayBuilder final : public ZoneObject {
......@@ -262,26 +265,16 @@ class BytecodeArrayBuilder final : public ZoneObject {
void SetExpressionAsStatementPosition(Expression* expr);
// Accessors
Zone* zone() const { return zone_; }
TemporaryRegisterAllocator* temporary_register_allocator() {
return &temporary_allocator_;
}
const TemporaryRegisterAllocator* temporary_register_allocator() const {
return &temporary_allocator_;
}
Zone* zone() const { return zone_; }
void EnsureReturn();
static OperandScale OperandSizesToScale(
OperandSize size0, OperandSize size1 = OperandSize::kByte,
OperandSize size2 = OperandSize::kByte,
OperandSize size3 = OperandSize::kByte);
static OperandSize SizeForRegisterOperand(Register reg);
static OperandSize SizeForSignedOperand(int value);
static OperandSize SizeForUnsignedOperand(int value);
static OperandSize SizeForUnsignedOperand(size_t value);
static uint32_t RegisterOperand(Register reg);
static Register RegisterFromOperand(uint32_t operand);
static uint32_t SignedOperand(int value, OperandSize size);
......@@ -289,7 +282,6 @@ class BytecodeArrayBuilder final : public ZoneObject {
static uint32_t UnsignedOperand(size_t value);
private:
class PreviousBytecodeHelper;
friend class BytecodeRegisterAllocator;
static Bytecode BytecodeForBinaryOperation(Token::Value op);
......@@ -305,11 +297,7 @@ class BytecodeArrayBuilder final : public ZoneObject {
static Bytecode BytecodeForCall(TailCallMode tail_call_mode);
static Bytecode GetJumpWithConstantOperand(Bytecode jump_smi8_operand);
static Bytecode GetJumpWithToBoolean(Bytecode jump_smi8_operand);
template <size_t N>
INLINE(void Output(Bytecode bytecode, uint32_t (&operands)[N],
OperandScale operand_scale = OperandScale::kSingle));
void Output(Bytecode bytecode);
void OutputScaled(Bytecode bytecode, OperandScale operand_scale,
uint32_t operand0, uint32_t operand1, uint32_t operand2,
......@@ -323,14 +311,13 @@ class BytecodeArrayBuilder final : public ZoneObject {
BytecodeArrayBuilder& OutputJump(Bytecode jump_bytecode,
BytecodeLabel* label);
void PatchJump(const ZoneVector<uint8_t>::iterator& jump_target,
const ZoneVector<uint8_t>::iterator& jump_location);
void PatchIndirectJumpWith8BitOperand(
const ZoneVector<uint8_t>::iterator& jump_location, int delta);
void PatchIndirectJumpWith16BitOperand(
const ZoneVector<uint8_t>::iterator& jump_location, int delta);
void PatchIndirectJumpWith32BitOperand(
const ZoneVector<uint8_t>::iterator& jump_location, int delta);
void PatchJump(size_t jump_target, size_t jump_location);
void PatchJumpWith8BitOperand(ZoneVector<uint8_t>* bytecodes,
size_t jump_location, int delta);
void PatchJumpWith16BitOperand(ZoneVector<uint8_t>* bytecodes,
size_t jump_location, int delta);
void PatchJumpWith32BitOperand(ZoneVector<uint8_t>* bytecodes,
size_t jump_location, int delta);
void LeaveBasicBlock();
......@@ -338,9 +325,8 @@ class BytecodeArrayBuilder final : public ZoneObject {
int operand_index, uint32_t operand_value) const;
bool RegisterIsValid(Register reg, OperandSize reg_size) const;
bool LastBytecodeInSameBlock() const;
bool NeedToBooleanCast();
bool IsRegisterInAccumulator(Register reg);
// Attach latest source position to |node|.
void AttachSourceInfo(BytecodeNode* node);
// Set position for return.
void SetReturnPosition();
......@@ -348,9 +334,11 @@ class BytecodeArrayBuilder final : public ZoneObject {
// Gets a constant pool entry for the |object|.
size_t GetConstantPoolEntry(Handle<Object> object);
ZoneVector<uint8_t>* bytecodes() { return &bytecodes_; }
const ZoneVector<uint8_t>* bytecodes() const { return &bytecodes_; }
Isolate* isolate() const { return isolate_; }
BytecodeArrayWriter* bytecode_array_writer() {
return &bytecode_array_writer_;
}
BytecodePipelineStage* pipeline() { return pipeline_; }
ConstantArrayBuilder* constant_array_builder() {
return &constant_array_builder_;
}
......@@ -366,13 +354,10 @@ class BytecodeArrayBuilder final : public ZoneObject {
Isolate* isolate_;
Zone* zone_;
ZoneVector<uint8_t> bytecodes_;
bool bytecode_generated_;
ConstantArrayBuilder constant_array_builder_;
HandlerTableBuilder handler_table_builder_;
SourcePositionTableBuilder source_position_table_builder_;
size_t last_block_end_;
size_t last_bytecode_start_;
bool exit_seen_in_block_;
int unbound_jumps_;
int parameter_count_;
......@@ -380,6 +365,9 @@ class BytecodeArrayBuilder final : public ZoneObject {
int context_register_count_;
int return_position_;
TemporaryRegisterAllocator temporary_allocator_;
BytecodeArrayWriter bytecode_array_writer_;
BytecodePipelineStage* pipeline_;
BytecodeSourceInfo latest_source_info_;
DISALLOW_COPY_AND_ASSIGN(BytecodeArrayBuilder);
};
......
......@@ -128,26 +128,15 @@ Register BytecodeArrayIterator::GetRegisterOperand(int operand_index) const {
}
int BytecodeArrayIterator::GetRegisterOperandRange(int operand_index) const {
interpreter::OperandType operand_type =
Bytecodes::GetOperandType(current_bytecode(), operand_index);
DCHECK(Bytecodes::IsRegisterOperandType(operand_type));
switch (operand_type) {
case OperandType::kRegPair:
case OperandType::kRegOutPair:
return 2;
case OperandType::kRegOutTriple:
return 3;
default: {
if (operand_index + 1 !=
Bytecodes::NumberOfOperands(current_bytecode())) {
OperandType next_operand_type =
Bytecodes::GetOperandType(current_bytecode(), operand_index + 1);
if (OperandType::kRegCount == next_operand_type) {
return GetRegisterCountOperand(operand_index + 1);
}
}
return 1;
}
DCHECK_LE(operand_index, Bytecodes::NumberOfOperands(current_bytecode()));
const OperandType* operand_types =
Bytecodes::GetOperandTypes(current_bytecode());
DCHECK(Bytecodes::IsRegisterOperandType(operand_types[operand_index]));
if (operand_types[operand_index + 1] == OperandType::kRegCount) {
return GetRegisterCountOperand(operand_index + 1);
} else {
OperandType operand_type = operand_types[operand_index];
return Bytecodes::GetNumberOfRegistersRepresentedBy(operand_type);
}
}
......
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/interpreter/bytecode-array-writer.h"
#include <iomanip>
#include "src/interpreter/source-position-table.h"
namespace v8 {
namespace internal {
namespace interpreter {
// |source_position_table_builder| is not owned and must outlive this
// writer; positions for written bytecodes are appended to it.
// |zone| backs the bytecode byte vector.
BytecodeArrayWriter::BytecodeArrayWriter(
    Zone* zone, SourcePositionTableBuilder* source_position_table_builder)
    : bytecodes_(zone),
      max_register_count_(0),
      source_position_table_builder_(source_position_table_builder) {}
// override
//
// Nothing to release explicitly: bytecodes_ is zone-allocated and the
// source position table builder is not owned.
BytecodeArrayWriter::~BytecodeArrayWriter() {}
// override
//
// The writer is the terminal pipeline stage and buffers nothing, so the
// offset at which the next bytecode would land is simply the number of
// bytes emitted so far.
size_t BytecodeArrayWriter::FlushForOffset() {
  return bytecodes_.size();
}
// override
void BytecodeArrayWriter::Write(BytecodeNode* node) {
  // Record the source position first, so the table entry refers to the
  // offset at which |node|'s bytecode begins.
  UpdateSourcePositionTable(node);
  EmitBytecode(node);
}
// Adds an entry to the source position table for |node|, keyed by the
// current end-of-stream offset, if |node| carries a valid source position.
void BytecodeArrayWriter::UpdateSourcePositionTable(
    const BytecodeNode* const node) {
  int bytecode_offset = static_cast<int>(bytecodes()->size());
  const BytecodeSourceInfo& source_info = node->source_info();
  if (source_info.is_valid()) {
    source_position_table_builder_->AddPosition(bytecode_offset,
                                                source_info.source_position(),
                                                source_info.is_statement());
  }
}
// Appends |node| to the bytecode stream: an optional operand-scale prefix
// bytecode, the bytecode itself, then each operand encoded at the node's
// operand scale. Also tracks the highest register index referenced by
// register operands so the writer can report the frame size actually used.
void BytecodeArrayWriter::EmitBytecode(const BytecodeNode* const node) {
  OperandScale operand_scale = node->operand_scale();
  if (operand_scale != OperandScale::kSingle) {
    // Scaled operands require a prefix bytecode before the main bytecode.
    Bytecode prefix = Bytecodes::OperandScaleToPrefixBytecode(operand_scale);
    bytecodes()->push_back(Bytecodes::ToByte(prefix));
  }

  Bytecode bytecode = node->bytecode();
  bytecodes()->push_back(Bytecodes::ToByte(bytecode));

  // Bit i of the bitmap is set when operand i is a register operand.
  int register_operand_bitmap = Bytecodes::GetRegisterOperandBitmap(bytecode);
  const uint32_t* const operands = node->operands();
  const OperandType* operand_types = Bytecodes::GetOperandTypes(bytecode);
  for (int i = 0; operand_types[i] != OperandType::kNone; ++i) {
    OperandType operand_type = operand_types[i];
    // Emit the operand at the width implied by its type and the scale.
    switch (Bytecodes::SizeOfOperand(operand_type, operand_scale)) {
      case OperandSize::kNone:
        UNREACHABLE();
        break;
      case OperandSize::kByte:
        bytecodes()->push_back(static_cast<uint8_t>(operands[i]));
        break;
      case OperandSize::kShort: {
        uint8_t operand_bytes[2];
        WriteUnalignedUInt16(operand_bytes, operands[i]);
        bytecodes()->insert(bytecodes()->end(), operand_bytes,
                            operand_bytes + 2);
        break;
      }
      case OperandSize::kQuad: {
        uint8_t operand_bytes[4];
        WriteUnalignedUInt32(operand_bytes, operands[i]);
        bytecodes()->insert(bytecodes()->end(), operand_bytes,
                            operand_bytes + 4);
        break;
      }
    }

    if ((register_operand_bitmap >> i) & 1) {
      // A register operand may denote several consecutive registers:
      // either a following kRegCount operand supplies the count, or the
      // operand type itself implies it (e.g. register pairs/triples).
      int count;
      if (operand_types[i + 1] == OperandType::kRegCount) {
        count = static_cast<int>(operands[i + 1]);
      } else {
        count = Bytecodes::GetNumberOfRegistersRepresentedBy(operand_type);
      }
      Register reg = Register::FromOperand(static_cast<int32_t>(operands[i]));
      max_register_count_ = std::max(max_register_count_, reg.index() + count);
    }
  }
}
// override
//
// Basic block boundaries need no action at this terminal stage; bytes are
// written as they arrive, nothing is buffered.
void BytecodeArrayWriter::FlushBasicBlock() {}
// Returns the frame size in bytes needed to accommodate the highest
// register index observed while emitting bytecode.
int BytecodeArrayWriter::GetMaximumFrameSizeUsed() {
  return max_register_count_ * kPointerSize;
}
} // namespace interpreter
} // namespace internal
} // namespace v8
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_INTERPRETER_BYTECODE_ARRAY_WRITER_H_
#define V8_INTERPRETER_BYTECODE_ARRAY_WRITER_H_
#include "src/interpreter/bytecode-pipeline.h"
namespace v8 {
namespace internal {
namespace interpreter {
class SourcePositionTableBuilder;
// Class for emitting bytecode as the final stage of the bytecode
// generation pipeline.
class BytecodeArrayWriter final : public BytecodePipelineStage {
 public:
  // |source_position_table_builder| is not owned and must outlive the
  // writer; positions for written bytecodes are appended to it.
  BytecodeArrayWriter(
      Zone* zone, SourcePositionTableBuilder* source_position_table_builder);
  virtual ~BytecodeArrayWriter();

  // BytecodePipelineStage interface.
  void Write(BytecodeNode* node) override;
  size_t FlushForOffset() override;
  void FlushBasicBlock() override;

  // Get the bytecode vector.
  ZoneVector<uint8_t>* bytecodes() { return &bytecodes_; }

  // Returns the size in bytes of the frame associated with the
  // bytecode written.
  int GetMaximumFrameSizeUsed();

 private:
  // Appends |node| (optional prefix, bytecode, operands) to |bytecodes_|.
  void EmitBytecode(const BytecodeNode* const node);
  // Records |node|'s source position against the current write offset.
  void UpdateSourcePositionTable(const BytecodeNode* const node);

  ZoneVector<uint8_t> bytecodes_;
  // Highest register index used plus register count; determines frame size.
  int max_register_count_;
  SourcePositionTableBuilder* source_position_table_builder_;

  DISALLOW_COPY_AND_ASSIGN(BytecodeArrayWriter);
};
} // namespace interpreter
} // namespace internal
} // namespace v8
#endif // V8_INTERPRETER_BYTECODE_ARRAY_WRITER_H_
......@@ -655,6 +655,9 @@ void BytecodeGenerator::BuildIndexedJump(Register index, size_t start_index,
.CompareOperation(Token::Value::EQ_STRICT, index)
.JumpIfTrue(&(targets[i]));
}
// TODO(oth): This should be an abort via the runtime with a
// corresponding message. An illegal bytecode should never be
// emitted in valid bytecode.
builder()->Illegal(); // Should never get here.
}
......
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/interpreter/bytecode-peephole-optimizer.h"
#include "src/interpreter/constant-array-builder.h"
#include "src/objects-inl.h"
#include "src/objects.h"
namespace v8 {
namespace internal {
namespace interpreter {
// |constant_array_builder| is used to inspect constant pool entries when
// matching peephole patterns; |next_stage| receives the (possibly
// optimized) bytecodes. Neither is owned.
BytecodePeepholeOptimizer::BytecodePeepholeOptimizer(
    ConstantArrayBuilder* constant_array_builder,
    BytecodePipelineStage* next_stage)
    : constant_array_builder_(constant_array_builder),
      next_stage_(next_stage),
      last_(Bytecode::kNop),
      last_is_valid_(false),
      last_is_discardable_(false) {
  // TODO(oth): Remove last_is_valid_ and use kIllegal for last_ when
  // not invalid. Currently blocked on bytecode generator emitting
  // kIllegal for entry not found in jump table.
}
void BytecodePeepholeOptimizer::InvalidateLast() { last_is_valid_ = false; }
bool BytecodePeepholeOptimizer::LastIsValid() const { return last_is_valid_; }
// Buffers a copy of |node| as the last-seen bytecode. The incoming
// node is only valid for the duration of the caller's Write(), hence
// the clone.
void BytecodePeepholeOptimizer::SetLast(const BytecodeNode* const node) {
  last_.Clone(node);
  last_is_discardable_ = true;
  last_is_valid_ = true;
}
// override
size_t BytecodePeepholeOptimizer::FlushForOffset() {
  size_t size = next_stage_->FlushForOffset();
  if (!LastIsValid()) {
    return size;
  }
  const bool droppable_nop = last_.bytecode() == Bytecode::kNop &&
                             !last_.source_info().is_statement();
  if (droppable_nop) {
    // A Nop without a statement position carries nothing the debugger
    // needs and has no effects by definition, so it can be dropped.
    InvalidateLast();
  } else {
    // The buffered bytecode will definitely be emitted: account for
    // its size and pin it so later peephole passes cannot discard it.
    size += last_.Size();
    last_is_discardable_ = false;
  }
  return size;
}
// override
void BytecodePeepholeOptimizer::FlushBasicBlock() {
  // Emit any buffered bytecode first so peephole matches cannot reach
  // across the basic block boundary.
  if (LastIsValid()) {
    next_stage_->Write(&last_);
    InvalidateLast();
  }
  next_stage_->FlushBasicBlock();
}
// override
void BytecodePeepholeOptimizer::Write(BytecodeNode* node) {
  // Attempt optimization if there is an earlier node to optimize with.
  if (LastIsValid()) {
    node = Optimize(node);
    // Only output the last node if it wasn't invalidated by the optimization.
    if (LastIsValid()) {
      next_stage_->Write(&last_);
      InvalidateLast();
    }
  }
  // Optimize() returns nullptr when |node| was elided entirely;
  // otherwise buffer it for a possible match with the next bytecode.
  if (node != nullptr) {
    SetLast(node);
  }
}
// Returns the constant pool entry referenced by operand |index| of
// |node|. The operand must be of type OperandType::kIdx.
Handle<Object> BytecodePeepholeOptimizer::GetConstantForIndexOperand(
    const BytecodeNode* const node, int index) const {
  // |index| addresses an operand, so it must be strictly less than the
  // operand count (the original DCHECK_LE would accept one-past-end).
  DCHECK_LT(index, node->operand_count());
  // Use |index| consistently: the original hard-coded operand 0 in
  // both the type check and the fetch, silently ignoring |index|.
  DCHECK_EQ(Bytecodes::GetOperandType(node->bytecode(), index),
            OperandType::kIdx);
  uint32_t index_operand = node->operand(index);
  return constant_array_builder_->At(index_operand);
}
bool BytecodePeepholeOptimizer::LastBytecodePutsNameInAccumulator() const {
DCHECK(LastIsValid());
return (last_.bytecode() == Bytecode::kTypeOf ||
last_.bytecode() == Bytecode::kToName ||
(last_.bytecode() == Bytecode::kLdaConstant &&
GetConstantForIndexOperand(&last_, 0)->IsName()));
}
void BytecodePeepholeOptimizer::UpdateCurrentBytecode(BytecodeNode* current) {
  // The bytecode array builder emits conditional jumps on boolean
  // conditions in ToBoolean form, i.e. JumpIfToBooleanTrue rather
  // than JumpIfTrue. The ToBoolean coercion is redundant when the
  // preceding bytecode already left a boolean in the accumulator, so
  // the jump can be rewritten to its plain variant.
  if (!Bytecodes::IsJumpIfToBoolean(current->bytecode()) ||
      !Bytecodes::WritesBooleanToAccumulator(last_.bytecode())) {
    return;
  }
  Bytecode plain_jump = Bytecodes::GetJumpWithoutToBoolean(current->bytecode());
  current->set_bytecode(plain_jump, current->operand(0),
                        current->operand_scale());
}
bool BytecodePeepholeOptimizer::CanElideCurrent(
    const BytecodeNode* const current) const {
  // Ldar and Star make the accumulator and register hold equivalent
  // values, so in a run of back-to-back Ldar/Star bytecodes on the
  // same register only the first transfer is needed.
  const bool redundant_transfer =
      Bytecodes::IsLdarOrStar(last_.bytecode()) &&
      Bytecodes::IsLdarOrStar(current->bytecode()) &&
      current->operand(0) == last_.operand(0);
  if (redundant_transfer) {
    return true;
  }
  // ToName is a no-op when the previous bytecode guaranteed a name is
  // already in the accumulator.
  if (current->bytecode() == Bytecode::kToName &&
      LastBytecodePutsNameInAccumulator()) {
    return true;
  }
  // Additional candidates for eliding current:
  // (i) ToNumber if the last puts a number in the accumulator.
  return false;
}
bool BytecodePeepholeOptimizer::CanElideLast(
    const BytecodeNode* const current) const {
  if (!last_is_discardable_) {
    return false;
  }
  // Nops are placeholders for holding source position information and
  // can always be elided.
  if (last_.bytecode() == Bytecode::kNop) {
    return true;
  }
  // The accumulator is invisible to the debugger: in a sequence of
  // consecutive side-effect-free accumulator loads only the final
  // load is potentially visible, so the earlier one is dead.
  return Bytecodes::IsAccumulatorLoadWithoutEffects(current->bytecode()) &&
         Bytecodes::IsAccumulatorLoadWithoutEffects(last_.bytecode());
}
// Runs the peephole pass over |current| against the buffered bytecode
// in |last_|. Returns the node the caller should buffer next, or
// nullptr when |current| was elided entirely.
BytecodeNode* BytecodePeepholeOptimizer::Optimize(BytecodeNode* current) {
  UpdateCurrentBytecode(current);
  if (CanElideCurrent(current)) {
    if (current->source_info().is_valid()) {
      // Degrade to a Nop instead of dropping the node outright so the
      // debugger keeps the source position.
      current->set_bytecode(Bytecode::kNop);
    } else {
      current = nullptr;
    }
  } else if (CanElideLast(current)) {
    if (last_.source_info().is_valid()) {
      // Transfer the buffered bytecode's source position onto
      // |current| so the position information survives the elision.
      current->source_info().Update(last_.source_info());
    }
    InvalidateLast();
  }
  return current;
}
} // namespace interpreter
} // namespace internal
} // namespace v8
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_INTERPRETER_BYTECODE_PEEPHOLE_OPTIMIZER_H_
#define V8_INTERPRETER_BYTECODE_PEEPHOLE_OPTIMIZER_H_
#include "src/interpreter/bytecode-pipeline.h"
namespace v8 {
namespace internal {
namespace interpreter {
class ConstantArrayBuilder;
// An optimization stage for performing peephole optimizations on
// generated bytecode. The optimizer may buffer one bytecode
// internally.
class BytecodePeepholeOptimizer final : public BytecodePipelineStage,
                                        public ZoneObject {
 public:
  BytecodePeepholeOptimizer(ConstantArrayBuilder* constant_array_builder,
                            BytecodePipelineStage* next_stage);

  // BytecodePipelineStage interface.
  void Write(BytecodeNode* node) override;
  size_t FlushForOffset() override;
  void FlushBasicBlock() override;

 private:
  // Attempts to combine |current| with the buffered bytecode; returns
  // the (possibly rewritten) node to buffer, or nullptr if |current|
  // was elided entirely.
  BytecodeNode* Optimize(BytecodeNode* current);

  // Rewrites |current| in place when an equivalent cheaper form exists
  // (e.g. dropping a redundant ToBoolean coercion from a jump).
  void UpdateCurrentBytecode(BytecodeNode* const current);
  bool CanElideCurrent(const BytecodeNode* const current) const;
  bool CanElideLast(const BytecodeNode* const current) const;

  // Management of the single buffered bytecode.
  void InvalidateLast();
  bool LastIsValid() const;
  void SetLast(const BytecodeNode* const node);

  bool LastBytecodePutsNameInAccumulator() const;
  Handle<Object> GetConstantForIndexOperand(const BytecodeNode* const node,
                                            int index) const;

  ConstantArrayBuilder* constant_array_builder_;
  BytecodePipelineStage* next_stage_;
  // The buffered bytecode awaiting a possible peephole match.
  BytecodeNode last_;
  bool last_is_valid_;
  bool last_is_discardable_;

  DISALLOW_COPY_AND_ASSIGN(BytecodePeepholeOptimizer);
};
} // namespace interpreter
} // namespace internal
} // namespace v8
#endif // V8_INTERPRETER_BYTECODE_PEEPHOLE_OPTIMIZER_H_
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/interpreter/bytecode-pipeline.h"
#include <iomanip>
#include "src/interpreter/source-position-table.h"
namespace v8 {
namespace internal {
namespace interpreter {
void BytecodeSourceInfo::Update(const BytecodeSourceInfo& entry) {
  DCHECK(entry.is_valid());
  // The incoming position replaces the current one in three cases.
  // The checks must stay ordered: source_position() DCHECKs validity,
  // so it may only be called once is_valid() is known to hold.
  bool take_entry;
  if (!is_valid()) {
    // (1) There is no existing position.
    take_entry = true;
  } else if (entry.is_statement() && !is_statement()) {
    // (2) The incoming position is a statement and the current
    //     position is an expression.
    take_entry = true;
  } else if (entry.is_statement() && is_statement() &&
             entry.source_position() > source_position()) {
    // (3) Both are statements and the incoming one is later in the
    //     source. Needed for the first statement in a function, which
    //     may have later statement positions added during bytecode
    //     generation.
    take_entry = true;
  } else {
    take_entry = false;
  }
  if (take_entry) {
    source_position_ = entry.source_position_;
    is_statement_ = entry.is_statement_;
  }
}
// Constructs an operand-free bytecode node.
BytecodeNode::BytecodeNode(Bytecode bytecode)
    : bytecode_(bytecode), operand_scale_(OperandScale::kSingle) {
  DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 0);
}
// Constructs a single-operand bytecode node.
BytecodeNode::BytecodeNode(Bytecode bytecode, uint32_t operand0,
                           OperandScale operand_scale)
    : bytecode_(bytecode), operand_scale_(operand_scale) {
  DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 1);
  operands_[0] = operand0;
}
// Constructs a two-operand bytecode node.
BytecodeNode::BytecodeNode(Bytecode bytecode, uint32_t operand0,
                           uint32_t operand1, OperandScale operand_scale)
    : bytecode_(bytecode), operand_scale_(operand_scale) {
  DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 2);
  operands_[0] = operand0;
  operands_[1] = operand1;
}
// Constructs a three-operand bytecode node.
BytecodeNode::BytecodeNode(Bytecode bytecode, uint32_t operand0,
                           uint32_t operand1, uint32_t operand2,
                           OperandScale operand_scale)
    : bytecode_(bytecode), operand_scale_(operand_scale) {
  DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 3);
  operands_[0] = operand0;
  operands_[1] = operand1;
  operands_[2] = operand2;
}
// Constructs a four-operand bytecode node.
BytecodeNode::BytecodeNode(Bytecode bytecode, uint32_t operand0,
                           uint32_t operand1, uint32_t operand2,
                           uint32_t operand3, OperandScale operand_scale)
    : bytecode_(bytecode), operand_scale_(operand_scale) {
  DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 4);
  operands_[0] = operand0;
  operands_[1] = operand1;
  operands_[2] = operand2;
  operands_[3] = operand3;
}
// Rewrites this node as the operand-free |bytecode|. The operand
// scale resets to single since there are no operands to scale.
void BytecodeNode::set_bytecode(Bytecode bytecode) {
  DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 0);
  operand_scale_ = OperandScale::kSingle;
  bytecode_ = bytecode;
}
// Rewrites this node as the single-operand |bytecode| carrying
// |operand0| at |operand_scale|.
void BytecodeNode::set_bytecode(Bytecode bytecode, uint32_t operand0,
                                OperandScale operand_scale) {
  DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 1);
  operand_scale_ = operand_scale;
  operands_[0] = operand0;
  bytecode_ = bytecode;
}
// Returns the serialized size of this node in a bytecode array. A
// scaled bytecode is preceded by a one-byte scaling prefix.
size_t BytecodeNode::Size() const {
  const size_t prefix_size =
      Bytecodes::OperandScaleRequiresPrefixBytecode(operand_scale_) ? 1 : 0;
  return prefix_size + Bytecodes::Size(bytecode_, operand_scale_);
}
// Prints a human-readable description of this node to |os| in debug
// builds; release builds emit only the node's address.
void BytecodeNode::Print(std::ostream& os) const {
#ifdef DEBUG
  // Preserve the caller's stream formatting across the hex/fill/width
  // manipulators used below.
  std::ios saved_state(nullptr);
  saved_state.copyfmt(os);
  os << Bytecodes::ToString(bytecode_);
  if (Bytecodes::OperandScaleRequiresPrefixBytecode(operand_scale_)) {
    Bytecode scale_prefix =
        Bytecodes::OperandScaleToPrefixBytecode(operand_scale_);
    os << '.' << Bytecodes::ToString(scale_prefix);
  }
  for (int i = 0; i < operand_count(); ++i) {
    os << ' ' << std::setw(8) << std::setfill('0') << std::hex << operands_[i];
  }
  os.copyfmt(saved_state);
  if (source_info_.is_valid()) {
    os << source_info_;
  }
  os << '\n';
#else
  os << static_cast<const void*>(this);
#endif  // DEBUG
}
// Byte-wise copy of |other| into this node. BytecodeSourceInfo's copy
// operations are disallowed, so memcpy is used instead of member-wise
// assignment; this assumes BytecodeNode is trivially copyable —
// TODO confirm (no virtuals or owning members are visible here).
void BytecodeNode::Clone(const BytecodeNode* const other) {
  memcpy(this, other, sizeof(*other));
}
// Two nodes are equal when they carry the same bytecode, the same
// source information, and identical operands.
bool BytecodeNode::operator==(const BytecodeNode& other) const {
  if (this == &other) {
    return true;
  }
  if (bytecode() != other.bytecode() ||
      source_info() != other.source_info()) {
    return false;
  }
  for (int i = 0; i < operand_count(); ++i) {
    if (operand(i) != other.operand(i)) {
      return false;
    }
  }
  return true;
}
// Stream insertion delegates to BytecodeNode::Print().
std::ostream& operator<<(std::ostream& os, const BytecodeNode& node) {
  node.Print(os);
  return os;
}
// Prints |info| as "<position> S>" for statements or "<position> E>"
// for expressions; invalid positions produce no output.
std::ostream& operator<<(std::ostream& os, const BytecodeSourceInfo& info) {
  if (!info.is_valid()) {
    return os;
  }
  const char kind = info.is_statement() ? 'S' : 'E';
  return os << info.source_position() << ' ' << kind << '>';
}
} // namespace interpreter
} // namespace internal
} // namespace v8
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_INTERPRETER_BYTECODE_PIPELINE_H_
#define V8_INTERPRETER_BYTECODE_PIPELINE_H_
#include "src/interpreter/bytecode-register-allocator.h"
#include "src/interpreter/bytecodes.h"
#include "src/zone-containers.h"
namespace v8 {
namespace internal {
namespace interpreter {
class BytecodeNode;
class BytecodeSourceInfo;
// Interface for bytecode pipeline stages.
class BytecodePipelineStage {
 public:
  virtual ~BytecodePipelineStage() {}

  // Write bytecode node |node| into pipeline. The node is only valid
  // for the duration of the call. Callees should clone it if
  // deferring Write() to the next stage.
  virtual void Write(BytecodeNode* node) = 0;

  // Flush state for bytecode array offset calculation. Returns the
  // current size of bytecode array.
  virtual size_t FlushForOffset() = 0;

  // Flush state to terminate basic block.
  virtual void FlushBasicBlock() = 0;
};
// Source code position information.
class BytecodeSourceInfo final {
 public:
  static const int kUninitializedPosition = -1;

  // Default arguments make a default-constructed info invalid (no
  // source position attached).
  BytecodeSourceInfo(int position = kUninitializedPosition,
                     bool is_statement = false)
      : source_position_(position), is_statement_(is_statement) {}

  // Combine later source info with current.
  void Update(const BytecodeSourceInfo& entry);

  int source_position() const {
    DCHECK(is_valid());
    return source_position_;
  }

  // A valid position is either a statement or an expression position;
  // an invalid info is never reported as a statement.
  bool is_statement() const { return is_valid() && is_statement_; }
  bool is_valid() const { return source_position_ != kUninitializedPosition; }
  void set_invalid() { source_position_ = kUninitializedPosition; }

  bool operator==(const BytecodeSourceInfo& other) const {
    return source_position_ == other.source_position_ &&
           is_statement_ == other.is_statement_;
  }
  bool operator!=(const BytecodeSourceInfo& other) const {
    return source_position_ != other.source_position_ ||
           is_statement_ != other.is_statement_;
  }

 private:
  // kUninitializedPosition marks the info as invalid.
  int source_position_;
  bool is_statement_;

  DISALLOW_COPY_AND_ASSIGN(BytecodeSourceInfo);
};
// A container for a generated bytecode, its operands, and source information.
// These must be allocated by a BytecodeNodeAllocator instance.
class BytecodeNode final : ZoneObject {
 public:
  // One constructor per operand arity; each DCHECKs that the arity
  // matches the bytecode's declared operand count.
  explicit BytecodeNode(Bytecode bytecode = Bytecode::kIllegal);
  BytecodeNode(Bytecode bytecode, uint32_t operand0,
               OperandScale operand_scale);
  BytecodeNode(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
               OperandScale operand_scale);
  BytecodeNode(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
               uint32_t operand2, OperandScale operand_scale);
  BytecodeNode(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
               uint32_t operand2, uint32_t operand3,
               OperandScale operand_scale);

  // Rewrite this node in place (used by peephole optimization).
  void set_bytecode(Bytecode bytecode);
  void set_bytecode(Bytecode bytecode, uint32_t operand0,
                    OperandScale operand_scale);

  // Clone |other|.
  void Clone(const BytecodeNode* const other);

  // Print to stream |os|.
  void Print(std::ostream& os) const;

  // Return the size when this node is serialized to a bytecode array.
  size_t Size() const;

  Bytecode bytecode() const { return bytecode_; }

  uint32_t operand(int i) const {
    DCHECK_LT(i, operand_count());
    return operands_[i];
  }
  uint32_t* operands() { return operands_; }
  const uint32_t* operands() const { return operands_; }

  // The operand count is a property of the bytecode, not stored per
  // node.
  int operand_count() const { return Bytecodes::NumberOfOperands(bytecode_); }
  OperandScale operand_scale() const { return operand_scale_; }

  const BytecodeSourceInfo& source_info() const { return source_info_; }
  BytecodeSourceInfo& source_info() { return source_info_; }

  bool operator==(const BytecodeNode& other) const;
  bool operator!=(const BytecodeNode& other) const { return !(*this == other); }

 private:
  // NOTE(review): kInvalidPosition appears unused within this class —
  // candidate for removal; confirm against other translation units.
  static const int kInvalidPosition = kMinInt;
  static const size_t kMaxOperands = 4;

  Bytecode bytecode_;
  uint32_t operands_[kMaxOperands];
  OperandScale operand_scale_;
  BytecodeSourceInfo source_info_;
};
std::ostream& operator<<(std::ostream& os, const BytecodeSourceInfo& info);
std::ostream& operator<<(std::ostream& os, const BytecodeNode& node);
} // namespace interpreter
} // namespace internal
} // namespace v8
#endif // V8_INTERPRETER_BYTECODE_PIPELINE_H_
......@@ -41,6 +41,28 @@ struct OperandTraits {
OPERAND_TYPE_LIST(DECLARE_OPERAND_TYPE_TRAITS)
#undef DECLARE_OPERAND_TYPE_TRAITS
// Computes the encoded size of a single operand of |operand_type| at
// |operand_scale|: non-scalable operand types keep their unscaled
// size, scalable types are multiplied by the scale factor.
template <OperandType operand_type, OperandScale operand_scale>
struct OperandScaler {
  // Helper dispatched on whether the operand type is scalable.
  template <bool, OperandSize, OperandScale>
  struct Helper {
    static const int kSize = 0;
  };
  // Non-scalable: size is the unscaled size.
  template <OperandSize size, OperandScale scale>
  struct Helper<false, size, scale> {
    static const int kSize = static_cast<int>(size);
  };
  // Scalable: size is the unscaled size times the scale.
  template <OperandSize size, OperandScale scale>
  struct Helper<true, size, scale> {
    static const int kSize = static_cast<int>(size) * static_cast<int>(scale);
  };

  static const int kSize =
      Helper<OperandTraits<operand_type>::TypeInfo::kIsScalable,
             OperandTraits<operand_type>::TypeInfo::kUnscaledSize,
             operand_scale>::kSize;
  static const OperandSize kOperandSize = static_cast<OperandSize>(kSize);
};
template <OperandType>
struct RegisterOperandTraits {
static const int kIsRegisterOperand = 0;
......@@ -61,11 +83,30 @@ template <AccumulatorUse accumulator_use, OperandType operand_0,
OperandType operand_1, OperandType operand_2, OperandType operand_3>
struct BytecodeTraits<accumulator_use, operand_0, operand_1, operand_2,
operand_3> {
static OperandType GetOperandType(int i) {
DCHECK(0 <= i && i < kOperandCount);
const OperandType kOperands[] = {operand_0, operand_1, operand_2,
operand_3};
return kOperands[i];
  // Returns the operand types, terminated by OperandType::kNone.
  static const OperandType* GetOperandTypes() {
    static const OperandType operand_types[] = {operand_0, operand_1, operand_2,
                                                operand_3, OperandType::kNone};
    return operand_types;
  }
  // Returns the encoded size of operand |i| at |operand_scale|.
  static OperandSize GetOperandSize(int i, OperandScale operand_scale) {
    switch (operand_scale) {
#define CASE(Name, _)                                                  \
  case OperandScale::k##Name: {                                        \
    static const OperandSize kOperandSizes[] = {                       \
        OperandScaler<operand_0, OperandScale::k##Name>::kOperandSize, \
        OperandScaler<operand_1, OperandScale::k##Name>::kOperandSize, \
        OperandScaler<operand_2, OperandScale::k##Name>::kOperandSize, \
        OperandScaler<operand_3, OperandScale::k##Name>::kOperandSize, \
    };                                                                 \
    DCHECK_LT(static_cast<size_t>(i), arraysize(kOperandSizes));       \
    return kOperandSizes[i];                                           \
  }
      OPERAND_SCALE_LIST(CASE)
#undef CASE
    }
    UNREACHABLE();
    return OperandSize::kNone;
  }
template <OperandType ot>
......@@ -98,10 +139,29 @@ struct BytecodeTraits<accumulator_use, operand_0, operand_1, operand_2,
template <AccumulatorUse accumulator_use, OperandType operand_0,
OperandType operand_1, OperandType operand_2>
struct BytecodeTraits<accumulator_use, operand_0, operand_1, operand_2> {
static inline OperandType GetOperandType(int i) {
DCHECK(0 <= i && i <= 2);
const OperandType kOperands[] = {operand_0, operand_1, operand_2};
return kOperands[i];
  // Returns the operand types, terminated by OperandType::kNone.
  static const OperandType* GetOperandTypes() {
    static const OperandType operand_types[] = {operand_0, operand_1, operand_2,
                                                OperandType::kNone};
    return operand_types;
  }
  // Returns the encoded size of operand |i| at |operand_scale|.
  static OperandSize GetOperandSize(int i, OperandScale operand_scale) {
    switch (operand_scale) {
#define CASE(Name, _)                                                  \
  case OperandScale::k##Name: {                                        \
    static const OperandSize kOperandSizes[] = {                       \
        OperandScaler<operand_0, OperandScale::k##Name>::kOperandSize, \
        OperandScaler<operand_1, OperandScale::k##Name>::kOperandSize, \
        OperandScaler<operand_2, OperandScale::k##Name>::kOperandSize, \
    };                                                                 \
    DCHECK_LT(static_cast<size_t>(i), arraysize(kOperandSizes));       \
    return kOperandSizes[i];                                           \
  }
      OPERAND_SCALE_LIST(CASE)
#undef CASE
    }
    UNREACHABLE();
    return OperandSize::kNone;
  }
template <OperandType ot>
......@@ -130,10 +190,28 @@ struct BytecodeTraits<accumulator_use, operand_0, operand_1, operand_2> {
template <AccumulatorUse accumulator_use, OperandType operand_0,
OperandType operand_1>
struct BytecodeTraits<accumulator_use, operand_0, operand_1> {
static inline OperandType GetOperandType(int i) {
DCHECK(0 <= i && i < kOperandCount);
const OperandType kOperands[] = {operand_0, operand_1};
return kOperands[i];
  // Returns the operand types, terminated by OperandType::kNone.
  static const OperandType* GetOperandTypes() {
    static const OperandType operand_types[] = {operand_0, operand_1,
                                                OperandType::kNone};
    return operand_types;
  }
  // Returns the encoded size of operand |i| at |operand_scale|.
  static OperandSize GetOperandSize(int i, OperandScale operand_scale) {
    switch (operand_scale) {
#define CASE(Name, _)                                                  \
  case OperandScale::k##Name: {                                        \
    static const OperandSize kOperandSizes[] = {                       \
        OperandScaler<operand_0, OperandScale::k##Name>::kOperandSize, \
        OperandScaler<operand_1, OperandScale::k##Name>::kOperandSize, \
    };                                                                 \
    DCHECK_LT(static_cast<size_t>(i), arraysize(kOperandSizes));       \
    return kOperandSizes[i];                                           \
  }
      OPERAND_SCALE_LIST(CASE)
#undef CASE
    }
    UNREACHABLE();
    return OperandSize::kNone;
  }
template <OperandType ot>
......@@ -158,9 +236,26 @@ struct BytecodeTraits<accumulator_use, operand_0, operand_1> {
template <AccumulatorUse accumulator_use, OperandType operand_0>
struct BytecodeTraits<accumulator_use, operand_0> {
static inline OperandType GetOperandType(int i) {
DCHECK(i == 0);
return operand_0;
  // Returns the operand types, terminated by OperandType::kNone.
  static const OperandType* GetOperandTypes() {
    static const OperandType operand_types[] = {operand_0, OperandType::kNone};
    return operand_types;
  }
  // Returns the encoded size of operand |i| at |operand_scale|.
  static OperandSize GetOperandSize(int i, OperandScale operand_scale) {
    switch (operand_scale) {
#define CASE(Name, _)                                                  \
  case OperandScale::k##Name: {                                        \
    static const OperandSize kOperandSizes[] = {                       \
        OperandScaler<operand_0, OperandScale::k##Name>::kOperandSize, \
    };                                                                 \
    DCHECK_LT(static_cast<size_t>(i), arraysize(kOperandSizes));       \
    return kOperandSizes[i];                                           \
  }
      OPERAND_SCALE_LIST(CASE)
#undef CASE
    }
    UNREACHABLE();
    return OperandSize::kNone;
  }
template <OperandType ot>
......@@ -182,9 +277,14 @@ struct BytecodeTraits<accumulator_use, operand_0> {
template <AccumulatorUse accumulator_use>
struct BytecodeTraits<accumulator_use> {
static inline OperandType GetOperandType(int i) {
static const OperandType* GetOperandTypes() {
static const OperandType operand_types[] = {OperandType::kNone};
return operand_types;
}
static OperandSize GetOperandSize(int i, OperandScale operand_scale) {
UNREACHABLE();
return OperandType::kNone;
return OperandSize::kNone;
}
template <OperandType ot>
......@@ -200,37 +300,22 @@ struct BytecodeTraits<accumulator_use> {
static const int kRegisterOperandBitmap = 0;
};
template <bool>
struct OperandScaler {
static int Multiply(int size, int operand_scale) { return 0; }
};
template <>
struct OperandScaler<false> {
static int Multiply(int size, int operand_scale) { return size; }
};
template <>
struct OperandScaler<true> {
static int Multiply(int size, int operand_scale) {
return size * operand_scale;
}
};
static OperandSize ScaledOperandSize(OperandType operand_type,
OperandScale operand_scale) {
STATIC_ASSERT(static_cast<int>(OperandScale::kQuadruple) == 4 &&
OperandScale::kLast == OperandScale::kQuadruple);
int index = static_cast<int>(operand_scale) >> 1;
switch (operand_type) {
#define CASE(Name, TypeInfo) \
case OperandType::k##Name: { \
OperandSize base_size = OperandTypeInfoTraits<TypeInfo>::kUnscaledSize; \
int size = \
OperandScaler<OperandTypeInfoTraits<TypeInfo>::kIsScalable>::Multiply( \
static_cast<int>(base_size), static_cast<int>(operand_scale)); \
OperandSize operand_size = static_cast<OperandSize>(size); \
DCHECK(operand_size == OperandSize::kByte || \
operand_size == OperandSize::kShort || \
operand_size == OperandSize::kQuad); \
return operand_size; \
#define CASE(Name, TypeInfo) \
case OperandType::k##Name: { \
static const OperandSize kOperandSizes[] = { \
OperandScaler<OperandType::k##Name, \
OperandScale::kSingle>::kOperandSize, \
OperandScaler<OperandType::k##Name, \
OperandScale::kDouble>::kOperandSize, \
OperandScaler<OperandType::k##Name, \
OperandScale::kQuadruple>::kOperandSize}; \
return kOperandSizes[index]; \
}
OPERAND_TYPE_LIST(CASE)
#undef CASE
......
......@@ -74,15 +74,13 @@ const char* Bytecodes::OperandTypeToString(OperandType operand_type) {
// static
const char* Bytecodes::OperandScaleToString(OperandScale operand_scale) {
switch (operand_scale) {
case OperandScale::kSingle:
return "Single";
case OperandScale::kDouble:
return "Double";
case OperandScale::kQuadruple:
return "Quadruple";
case OperandScale::kInvalid:
UNREACHABLE();
#define CASE(Name, _) \
case OperandScale::k##Name: \
return #Name;
OPERAND_SCALE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return "";
}
......@@ -240,25 +238,81 @@ bool Bytecodes::WritesAccumulator(Bytecode bytecode) {
AccumulatorUse::kWrite;
}
// static
bool Bytecodes::WritesBooleanToAccumulator(Bytecode bytecode) {
  switch (bytecode) {
    // Boolean literal loads.
    case Bytecode::kLdaTrue:
    case Bytecode::kLdaFalse:
    // Logical negation always yields a boolean.
    case Bytecode::kLogicalNot:
    // Comparison/test bytecodes produce booleans.
    case Bytecode::kTestEqual:
    case Bytecode::kTestNotEqual:
    case Bytecode::kTestEqualStrict:
    case Bytecode::kTestLessThan:
    case Bytecode::kTestLessThanOrEqual:
    case Bytecode::kTestGreaterThan:
    case Bytecode::kTestGreaterThanOrEqual:
    case Bytecode::kTestInstanceOf:
    case Bytecode::kTestIn:
    // For-in completion check.
    case Bytecode::kForInDone:
      return true;
    default:
      return false;
  }
}
// static
bool Bytecodes::IsAccumulatorLoadWithoutEffects(Bytecode bytecode) {
  switch (bytecode) {
    // Literal and constant pool loads into the accumulator.
    case Bytecode::kLdaZero:
    case Bytecode::kLdaSmi:
    case Bytecode::kLdaUndefined:
    case Bytecode::kLdaNull:
    case Bytecode::kLdaTheHole:
    case Bytecode::kLdaTrue:
    case Bytecode::kLdaFalse:
    case Bytecode::kLdaConstant:
    // Register-to-accumulator transfer.
    case Bytecode::kLdar:
      return true;
    default:
      return false;
  }
}
// static
OperandType Bytecodes::GetOperandType(Bytecode bytecode, int i) {
  DCHECK_LE(bytecode, Bytecode::kLast);
  DCHECK_LT(i, NumberOfOperands(bytecode));
  DCHECK_GE(i, 0);
  // Index into the kNone-terminated per-bytecode operand type array.
  return GetOperandTypes(bytecode)[i];
}
// static
const OperandType* Bytecodes::GetOperandTypes(Bytecode bytecode) {
DCHECK(bytecode <= Bytecode::kLast);
switch (bytecode) {
#define CASE(Name, ...) \
case Bytecode::k##Name: \
return BytecodeTraits<__VA_ARGS__>::GetOperandType(i);
return BytecodeTraits<__VA_ARGS__>::GetOperandTypes();
BYTECODE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return OperandType::kNone;
return nullptr;
}
// static
OperandSize Bytecodes::GetOperandSize(Bytecode bytecode, int i,
OperandScale operand_scale) {
OperandType op_type = GetOperandType(bytecode, i);
return ScaledOperandSize(op_type, operand_scale);
DCHECK(bytecode <= Bytecode::kLast);
switch (bytecode) {
#define CASE(Name, ...) \
case Bytecode::k##Name: \
return BytecodeTraits<__VA_ARGS__>::GetOperandSize(i, operand_scale);
BYTECODE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return OperandSize::kNone;
}
// static
......@@ -279,6 +333,7 @@ int Bytecodes::GetRegisterOperandBitmap(Bytecode bytecode) {
// static
int Bytecodes::GetOperandOffset(Bytecode bytecode, int i,
OperandScale operand_scale) {
DCHECK_LT(i, Bytecodes::NumberOfOperands(bytecode));
// TODO(oth): restore this to a statically determined constant.
int offset = 1;
for (int operand_index = 0; operand_index < i; ++operand_index) {
......@@ -343,6 +398,31 @@ bool Bytecodes::IsJump(Bytecode bytecode) {
return IsJumpImmediate(bytecode) || IsJumpConstant(bytecode);
}
// static
// Returns true for conditional jumps that coerce the accumulator to a
// boolean before testing it.
bool Bytecodes::IsJumpIfToBoolean(Bytecode bytecode) {
  switch (bytecode) {
    case Bytecode::kJumpIfToBooleanTrue:
    case Bytecode::kJumpIfToBooleanFalse:
    case Bytecode::kJumpIfToBooleanTrueConstant:
    case Bytecode::kJumpIfToBooleanFalseConstant:
      return true;
    default:
      return false;
  }
}
// static
// Maps a ToBoolean-coercing conditional jump to its plain variant.
// Must only be called with a bytecode for which IsJumpIfToBoolean()
// holds; anything else is a fatal error.
Bytecode Bytecodes::GetJumpWithoutToBoolean(Bytecode bytecode) {
  switch (bytecode) {
    case Bytecode::kJumpIfToBooleanTrue:
      return Bytecode::kJumpIfTrue;
    case Bytecode::kJumpIfToBooleanFalse:
      return Bytecode::kJumpIfFalse;
    case Bytecode::kJumpIfToBooleanTrueConstant:
      return Bytecode::kJumpIfTrueConstant;
    case Bytecode::kJumpIfToBooleanFalseConstant:
      return Bytecode::kJumpIfFalseConstant;
    default:
      break;
  }
  UNREACHABLE();
  return Bytecode::kIllegal;
}
// static
bool Bytecodes::IsCallOrNew(Bytecode bytecode) {
......@@ -370,6 +450,11 @@ bool Bytecodes::IsDebugBreak(Bytecode bytecode) {
return false;
}
// static
// Returns true if |bytecode| transfers between the accumulator and a
// register (Ldar or Star).
bool Bytecodes::IsLdarOrStar(Bytecode bytecode) {
  switch (bytecode) {
    case Bytecode::kLdar:
    case Bytecode::kStar:
      return true;
    default:
      return false;
  }
}
// static
bool Bytecodes::IsBytecodeWithScalableOperands(Bytecode bytecode) {
switch (bytecode) {
......@@ -460,6 +545,24 @@ bool Bytecodes::IsRegisterOutputOperandType(OperandType operand_type) {
return false;
}
// static
int Bytecodes::GetNumberOfRegistersRepresentedBy(OperandType operand_type) {
  switch (operand_type) {
    // Single register (possibly absent for kMaybeReg).
    case OperandType::kMaybeReg:
    case OperandType::kReg:
    case OperandType::kRegOut:
      return 1;
    // Consecutive register pair.
    case OperandType::kRegPair:
    case OperandType::kRegOutPair:
      return 2;
    // Consecutive register triple.
    case OperandType::kRegOutTriple:
      return 3;
    default:
      // Non-register operand types are a caller error.
      UNREACHABLE();
  }
  return 0;
}
// static
bool Bytecodes::IsUnsignedOperandType(OperandType operand_type) {
switch (operand_type) {
......@@ -474,10 +577,62 @@ bool Bytecodes::IsUnsignedOperandType(OperandType operand_type) {
}
// static
OperandScale Bytecodes::NextOperandScale(OperandScale operand_scale) {
DCHECK(operand_scale >= OperandScale::kSingle &&
operand_scale <= OperandScale::kMaxValid);
return static_cast<OperandScale>(2 * static_cast<int>(operand_scale));
// Returns the narrowest operand size that can represent the signed
// |value|.
OperandSize Bytecodes::SizeForSignedOperand(int value) {
  if (value >= kMinInt8 && value <= kMaxInt8) {
    return OperandSize::kByte;
  }
  if (value >= kMinInt16 && value <= kMaxInt16) {
    return OperandSize::kShort;
  }
  return OperandSize::kQuad;
}
// static
// Returns the narrowest operand size that can represent the
// non-negative |value|.
OperandSize Bytecodes::SizeForUnsignedOperand(int value) {
  DCHECK_GE(value, 0);
  if (value <= kMaxUInt8) {
    return OperandSize::kByte;
  }
  if (value <= kMaxUInt16) {
    return OperandSize::kShort;
  }
  return OperandSize::kQuad;
}
// Returns the narrowest operand size that can represent the unsigned
// |value|. Values above kMaxUInt32 are not representable in any
// operand and are a fatal error.
OperandSize Bytecodes::SizeForUnsignedOperand(size_t value) {
  if (value <= static_cast<size_t>(kMaxUInt8)) {
    return OperandSize::kByte;
  }
  if (value <= static_cast<size_t>(kMaxUInt16)) {
    return OperandSize::kShort;
  }
  if (value <= kMaxUInt32) {
    return OperandSize::kQuad;
  }
  UNREACHABLE();
  return OperandSize::kQuad;
}
// Returns the operand scale needed to accommodate the widest of the
// given (already scaled) operand sizes.
OperandScale Bytecodes::OperandSizesToScale(OperandSize size0,
                                            OperandSize size1,
                                            OperandSize size2,
                                            OperandSize size3) {
  OperandSize upper = std::max(size0, size1);
  OperandSize lower = std::max(size2, size3);
  OperandSize result = std::max(upper, lower);
  // Operand sizes have been scaled before calling this function.
  // Currently all scalable operands are byte sized at
  // OperandScale::kSingle.
  STATIC_ASSERT(static_cast<int>(OperandSize::kByte) ==
                    static_cast<int>(OperandScale::kSingle) &&
                static_cast<int>(OperandSize::kShort) ==
                    static_cast<int>(OperandScale::kDouble) &&
                static_cast<int>(OperandSize::kQuad) ==
                    static_cast<int>(OperandScale::kQuadruple));
  // The numeric identity asserted above lets the widest size be
  // reinterpreted directly as a scale.
  OperandScale operand_scale = static_cast<OperandScale>(result);
  DCHECK(operand_scale == OperandScale::kSingle ||
         operand_scale == OperandScale::kDouble ||
         operand_scale == OperandScale::kQuadruple);
  return operand_scale;
}
// static
......
......@@ -98,7 +98,7 @@ namespace interpreter {
OperandType::kIdx) \
\
/* Context operations */ \
V(PushContext, AccumulatorUse::kRead, OperandType::kReg) \
V(PushContext, AccumulatorUse::kRead, OperandType::kRegOut) \
V(PopContext, AccumulatorUse::kNone, OperandType::kReg) \
V(LdaContextSlot, AccumulatorUse::kWrite, OperandType::kReg, \
OperandType::kIdx) \
......@@ -250,7 +250,11 @@ namespace interpreter {
DEBUG_BREAK_BYTECODE_LIST(V) \
\
/* Illegal bytecode (terminates execution) */ \
V(Illegal, AccumulatorUse::kNone)
V(Illegal, AccumulatorUse::kNone) \
\
/* No operation (used to maintain source positions for peephole */ \
/* eliminated bytecodes). */ \
V(Nop, AccumulatorUse::kNone)
enum class AccumulatorUse : uint8_t {
kNone = 0,
......@@ -271,12 +275,16 @@ V8_INLINE AccumulatorUse operator|(AccumulatorUse lhs, AccumulatorUse rhs) {
// Enumeration of scaling factors applicable to scalable operands. Code
// relies on being able to cast values to integer scaling values.
#define OPERAND_SCALE_LIST(V) \
V(Single, 1) \
V(Double, 2) \
V(Quadruple, 4)
enum class OperandScale : uint8_t {
kSingle = 1,
kDouble = 2,
kQuadruple = 4,
kMaxValid = kQuadruple,
kInvalid = 8,
#define DECLARE_OPERAND_SCALE(Name, Scale) k##Name = Scale,
OPERAND_SCALE_LIST(DECLARE_OPERAND_SCALE)
#undef DECLARE_OPERAND_SCALE
kLast = kQuadruple
};
// Enumeration of the size classes of operand types used by
......@@ -333,7 +341,7 @@ enum class Bytecode : uint8_t {
// An interpreter Register which is located in the function's Register file
// in its stack-frame. Register hold parameters, this, and expression values.
class Register {
class Register final {
public:
explicit Register(int index = kInvalidIndex) : index_(index) {}
......@@ -464,9 +472,20 @@ class Bytecodes {
// Returns true if |bytecode| writes the accumulator.
static bool WritesAccumulator(Bytecode bytecode);
// Return true if |bytecode| writes the accumulator with a boolean value.
static bool WritesBooleanToAccumulator(Bytecode bytecode);
// Return true if |bytecode| is an accumulator load bytecode,
// e.g. LdaConstant, LdaTrue, Ldar.
static bool IsAccumulatorLoadWithoutEffects(Bytecode bytecode);
// Returns the i-th operand of |bytecode|.
static OperandType GetOperandType(Bytecode bytecode, int i);
// Returns a pointer to an array of operand types terminated in
// OperandType::kNone.
static const OperandType* GetOperandTypes(Bytecode bytecode);
// Returns the size of the i-th operand of |bytecode|.
static OperandSize GetOperandSize(Bytecode bytecode, int i,
OperandScale operand_scale);
......@@ -514,6 +533,13 @@ class Bytecodes {
// any kind of operand.
static bool IsJump(Bytecode bytecode);
// Returns true if the bytecode is a jump that internally coerces the
// accumulator to a boolean.
static bool IsJumpIfToBoolean(Bytecode bytecode);
// Returns the equivalent jump bytecode without the accumulator coercion.
static Bytecode GetJumpWithoutToBoolean(Bytecode bytecode);
// Returns true if the bytecode is a conditional jump, a jump, or a return.
static bool IsJumpOrReturn(Bytecode bytecode);
......@@ -526,6 +552,9 @@ class Bytecodes {
// Returns true if the bytecode is a debug break.
static bool IsDebugBreak(Bytecode bytecode);
// Returns true if the bytecode is Ldar or Star.
static bool IsLdarOrStar(Bytecode bytecode);
// Returns true if the bytecode has wider operand forms.
static bool IsBytecodeWithScalableOperands(Bytecode bytecode);
......@@ -541,6 +570,10 @@ class Bytecodes {
// Returns true if |operand_type| represents a register used as an output.
static bool IsRegisterOutputOperandType(OperandType operand_type);
// Returns the number of registers represented by a register operand. For
// instance, a RegPair represents two registers.
static int GetNumberOfRegistersRepresentedBy(OperandType operand_type);
// Returns true if |operand_type| is a maybe register operand
// (kMaybeReg).
static bool IsMaybeRegisterOperandType(OperandType operand_type);
......@@ -576,8 +609,21 @@ class Bytecodes {
// OperandScale values.
static bool BytecodeHasHandler(Bytecode bytecode, OperandScale operand_scale);
// Return the next larger operand scale.
static OperandScale NextOperandScale(OperandScale operand_scale);
// Return the operand size required to hold a signed operand.
static OperandSize SizeForSignedOperand(int value);
// Return the operand size required to hold an unsigned operand.
static OperandSize SizeForUnsignedOperand(int value);
// Return the operand size required to hold an unsigned operand.
static OperandSize SizeForUnsignedOperand(size_t value);
// Return the OperandScale required for bytecode emission of
// operand sizes.
static OperandScale OperandSizesToScale(
OperandSize size0, OperandSize size1 = OperandSize::kByte,
OperandSize size2 = OperandSize::kByte,
OperandSize size3 = OperandSize::kByte);
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(Bytecodes);
......
......@@ -45,9 +45,13 @@ void Interpreter::Initialize() {
}
// Generate bytecode handlers for all bytecodes and scales.
for (OperandScale operand_scale = OperandScale::kSingle;
operand_scale <= OperandScale::kMaxValid;
operand_scale = Bytecodes::NextOperandScale(operand_scale)) {
const OperandScale kOperandScales[] = {
#define VALUE(Name, _) OperandScale::k##Name,
OPERAND_SCALE_LIST(VALUE)
#undef VALUE
};
for (OperandScale operand_scale : kOperandScales) {
#define GENERATE_CODE(Name, ...) \
{ \
if (Bytecodes::BytecodeHasHandler(Bytecode::k##Name, operand_scale)) { \
......@@ -93,12 +97,16 @@ size_t Interpreter::GetDispatchTableIndex(Bytecode bytecode,
OperandScale operand_scale) {
static const size_t kEntriesPerOperandScale = 1u << kBitsPerByte;
size_t index = static_cast<size_t>(bytecode);
OperandScale current_scale = OperandScale::kSingle;
while (current_scale != operand_scale) {
index += kEntriesPerOperandScale;
current_scale = Bytecodes::NextOperandScale(current_scale);
switch (operand_scale) {
case OperandScale::kSingle:
return index;
case OperandScale::kDouble:
return index + kEntriesPerOperandScale;
case OperandScale::kQuadruple:
return index + 2 * kEntriesPerOperandScale;
}
return index;
UNREACHABLE();
return 0;
}
void Interpreter::IterateDispatchTable(ObjectVisitor* v) {
......@@ -1767,6 +1775,11 @@ void Interpreter::DoIllegal(InterpreterAssembler* assembler) {
__ Abort(kInvalidBytecode);
}
// Nop
//
// No operation.
void Interpreter::DoNop(InterpreterAssembler* assembler) { __ Dispatch(); }
// SuspendGenerator <generator>
//
// Exports the register file and stores it into the generator. Also stores the
......
......@@ -115,53 +115,34 @@ void DecodeEntry(ByteArray* bytes, int* index, PositionTableEntry* entry) {
} // namespace
void SourcePositionTableBuilder::AddStatementPosition(size_t bytecode_offset,
int source_position) {
void SourcePositionTableBuilder::AddPosition(size_t bytecode_offset,
int source_position,
bool is_statement) {
int offset = static_cast<int>(bytecode_offset);
AddEntry({offset, source_position, true});
}
void SourcePositionTableBuilder::AddExpressionPosition(size_t bytecode_offset,
int source_position) {
int offset = static_cast<int>(bytecode_offset);
AddEntry({offset, source_position, false});
AddEntry({offset, source_position, is_statement});
}
void SourcePositionTableBuilder::AddEntry(const PositionTableEntry& entry) {
// Don't encode a new entry if this bytecode already has a source position
// assigned.
if (candidate_.bytecode_offset == entry.bytecode_offset) {
if (entry.is_statement) candidate_ = entry;
return;
}
CommitEntry();
candidate_ = entry;
}
void SourcePositionTableBuilder::CommitEntry() {
if (candidate_.bytecode_offset == kUninitializedCandidateOffset) return;
PositionTableEntry tmp(candidate_);
PositionTableEntry tmp(entry);
SubtractFromEntry(tmp, previous_);
EncodeEntry(bytes_, tmp);
previous_ = candidate_;
previous_ = entry;
if (candidate_.is_statement) {
if (entry.is_statement) {
LOG_CODE_EVENT(isolate_, CodeLinePosInfoAddStatementPositionEvent(
jit_handler_data_, candidate_.bytecode_offset,
candidate_.source_position));
jit_handler_data_, entry.bytecode_offset,
entry.source_position));
}
LOG_CODE_EVENT(isolate_, CodeLinePosInfoAddPositionEvent(
jit_handler_data_, candidate_.bytecode_offset,
candidate_.source_position));
jit_handler_data_, entry.bytecode_offset,
entry.source_position));
#ifdef ENABLE_SLOW_DCHECKS
raw_entries_.push_back(candidate_);
raw_entries_.push_back(entry);
#endif
}
Handle<ByteArray> SourcePositionTableBuilder::ToSourcePositionTable() {
CommitEntry();
if (bytes_.empty()) return isolate_->factory()->empty_byte_array();
Handle<ByteArray> table = isolate_->factory()->NewByteArray(
......
......@@ -34,7 +34,7 @@ struct PositionTableEntry {
bool is_statement;
};
class SourcePositionTableBuilder : public PositionsRecorder {
class SourcePositionTableBuilder final : public PositionsRecorder {
public:
SourcePositionTableBuilder(Isolate* isolate, Zone* zone)
: isolate_(isolate),
......@@ -42,16 +42,14 @@ class SourcePositionTableBuilder : public PositionsRecorder {
#ifdef ENABLE_SLOW_DCHECKS
raw_entries_(zone),
#endif
candidate_(kUninitializedCandidateOffset, 0, false) {
previous_() {
}
void AddStatementPosition(size_t bytecode_offset, int source_position);
void AddExpressionPosition(size_t bytecode_offset, int source_position);
void AddPosition(size_t bytecode_offset, int source_position,
bool is_statement);
Handle<ByteArray> ToSourcePositionTable();
private:
static const int kUninitializedCandidateOffset = -1;
void AddEntry(const PositionTableEntry& entry);
void CommitEntry();
......@@ -60,7 +58,6 @@ class SourcePositionTableBuilder : public PositionsRecorder {
#ifdef ENABLE_SLOW_DCHECKS
ZoneVector<PositionTableEntry> raw_entries_;
#endif
PositionTableEntry candidate_; // Next entry to be written, if initialized.
PositionTableEntry previous_; // Previously written entry, to compute delta.
};
......
......@@ -1580,12 +1580,15 @@ void Logger::LogCodeObjects() {
void Logger::LogBytecodeHandlers() {
if (!FLAG_ignition) return;
interpreter::Interpreter* interpreter = isolate_->interpreter();
const interpreter::OperandScale kOperandScales[] = {
#define VALUE(Name, _) interpreter::OperandScale::k##Name,
OPERAND_SCALE_LIST(VALUE)
#undef VALUE
};
const int last_index = static_cast<int>(interpreter::Bytecode::kLast);
for (auto operand_scale = interpreter::OperandScale::kSingle;
operand_scale <= interpreter::OperandScale::kMaxValid;
operand_scale =
interpreter::Bytecodes::NextOperandScale(operand_scale)) {
interpreter::Interpreter* interpreter = isolate_->interpreter();
for (auto operand_scale : kOperandScales) {
for (int index = 0; index <= last_index; ++index) {
interpreter::Bytecode bytecode = interpreter::Bytecodes::FromByte(index);
if (interpreter::Bytecodes::BytecodeHasHandler(bytecode, operand_scale)) {
......
......@@ -894,6 +894,12 @@
'interpreter/bytecode-array-builder.h',
'interpreter/bytecode-array-iterator.cc',
'interpreter/bytecode-array-iterator.h',
'interpreter/bytecode-array-writer.cc',
'interpreter/bytecode-array-writer.h',
'interpreter/bytecode-peephole-optimizer.cc',
'interpreter/bytecode-peephole-optimizer.h',
'interpreter/bytecode-pipeline.cc',
'interpreter/bytecode-pipeline.h',
'interpreter/bytecode-register-allocator.cc',
'interpreter/bytecode-register-allocator.h',
'interpreter/bytecode-generator.cc',
......
......@@ -196,7 +196,7 @@ snippet: "
class C { constructor() { count++; }}
return new C();
"
frame size: 10
frame size: 7
parameter count: 1
bytecode array length: 74
bytecodes: [
......
......@@ -15,7 +15,7 @@ snippet: "
"
frame size: 11
parameter count: 1
bytecode array length: 197
bytecode array length: 195
bytecodes: [
B(Ldar), R(new_target),
B(JumpIfUndefined), U8(12),
......@@ -64,10 +64,9 @@ bytecodes: [
B(Star), R(3),
B(LdaZero),
B(Star), R(2),
B(Jump), U8(38),
B(Jump), U8(36),
B(Ldar), R(7),
B(Throw),
B(Ldar), R(7),
B(LdaUndefined),
B(Star), R(5),
B(LdaTrue),
......@@ -111,7 +110,7 @@ bytecodes: [
constant pool: [
]
handlers: [
[30, 133, 139],
[30, 131, 137],
]
---
......@@ -120,7 +119,7 @@ snippet: "
"
frame size: 11
parameter count: 1
bytecode array length: 293
bytecode array length: 289
bytecodes: [
B(Ldar), R(new_target),
B(JumpIfUndefined), U8(18),
......@@ -131,7 +130,7 @@ bytecodes: [
B(JumpIfTrue), U8(55),
B(LdaSmi), U8(1),
B(TestEqualStrict), R(1),
B(JumpIfTrueConstant), U8(0),
B(JumpIfTrue), U8(127),
B(Illegal),
B(CallRuntime), U16(Runtime::kNewFunctionContext), R(closure), U8(1),
B(PushContext), R(0),
......@@ -172,10 +171,9 @@ bytecodes: [
B(Star), R(3),
B(LdaZero),
B(Star), R(2),
B(Jump), U8(119),
B(Jump), U8(115),
B(Ldar), R(7),
B(Throw),
B(Ldar), R(7),
/* 16 S> */ B(LdaSmi), U8(42),
B(Star), R(5),
B(LdaFalse),
......@@ -208,10 +206,9 @@ bytecodes: [
B(Star), R(3),
B(LdaSmi), U8(1),
B(Star), R(2),
B(Jump), U8(38),
B(Jump), U8(36),
B(Ldar), R(6),
B(Throw),
B(Ldar), R(6),
B(LdaUndefined),
B(Star), R(5),
B(LdaTrue),
......@@ -258,10 +255,9 @@ bytecodes: [
/* 25 S> */ B(Return),
]
constant pool: [
kInstanceTypeDontCare,
]
handlers: [
[36, 220, 226],
[36, 216, 222],
]
---
......@@ -270,7 +266,7 @@ snippet: "
"
frame size: 17
parameter count: 1
bytecode array length: 794
bytecode array length: 792
bytecodes: [
B(Ldar), R(new_target),
B(JumpIfUndefined), U8(18),
......@@ -325,7 +321,6 @@ bytecodes: [
B(JumpConstant), U8(17),
B(Ldar), R(9),
B(Throw),
B(Ldar), R(9),
B(LdaConstant), U8(0),
B(Star), R(7),
B(Ldar), R(closure),
......@@ -634,9 +629,9 @@ constant pool: [
kInstanceTypeDontCare,
]
handlers: [
[36, 712, 718],
[150, 448, 454],
[153, 399, 401],
[556, 571, 573],
[36, 710, 716],
[148, 446, 452],
[151, 397, 399],
[554, 569, 571],
]
......@@ -119,7 +119,7 @@ snippet: "
"
frame size: 3
parameter count: 1
bytecode array length: 32
bytecode array length: 24
bytecodes: [
/* 30 E> */ B(StackCheck),
/* 42 S> */ B(LdaSmi), U8(2),
......@@ -129,12 +129,8 @@ bytecodes: [
/* 56 S> */ B(LdaSmi), U8(4),
/* 56 E> */ B(Star), R(2),
/* 59 S> */ B(Ldar), R(0),
B(JumpIfToBooleanTrue), U8(16),
/* 72 E> */ B(Ldar), R(0),
/* 75 E> */ B(Ldar), R(1),
/* 78 E> */ B(Ldar), R(0),
/* 81 E> */ B(Ldar), R(1),
B(LdaSmi), U8(5),
B(JumpIfToBooleanTrue), U8(8),
/* 81 E> */ B(LdaSmi), U8(5),
/* 86 E> */ B(Star), R(2),
B(LdaSmi), U8(3),
/* 95 S> */ B(Return),
......
......@@ -22,11 +22,16 @@ class BytecodeArrayBuilderTest : public TestWithIsolateAndZone {
TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
BytecodeArrayBuilder builder(isolate(), zone(), 0, 1, 131);
Factory* factory = isolate()->factory();
CHECK_EQ(builder.locals_count(), 131);
CHECK_EQ(builder.context_count(), 1);
CHECK_EQ(builder.fixed_register_count(), 132);
Register reg(0);
Register other(reg.index() + 1);
Register wide(128);
// Emit argument creation operations.
builder.CreateArguments(CreateArgumentsType::kMappedArguments)
.CreateArguments(CreateArgumentsType::kUnmappedArguments)
......@@ -34,19 +39,27 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
// Emit constant loads.
builder.LoadLiteral(Smi::FromInt(0))
.StoreAccumulatorInRegister(reg)
.LoadLiteral(Smi::FromInt(8))
.StoreAccumulatorInRegister(reg)
.LoadLiteral(Smi::FromInt(10000000))
.StoreAccumulatorInRegister(reg)
.LoadLiteral(factory->NewStringFromStaticChars("A constant"))
.StoreAccumulatorInRegister(reg)
.LoadUndefined()
.StoreAccumulatorInRegister(reg)
.LoadNull()
.StoreAccumulatorInRegister(reg)
.LoadTheHole()
.StoreAccumulatorInRegister(reg)
.LoadTrue()
.LoadFalse();
.StoreAccumulatorInRegister(reg)
.LoadFalse()
.StoreAccumulatorInRegister(wide);
Register reg(0);
Register other(reg.index() + 1);
Register wide(128);
builder.LoadAccumulatorWithRegister(reg)
builder.StackCheck(0)
.LoadAccumulatorWithRegister(other)
.StoreAccumulatorInRegister(reg)
.LoadNull()
.StoreAccumulatorInRegister(reg);
......@@ -55,7 +68,6 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
builder.MoveRegister(reg, wide);
// Emit global load / store operations.
Factory* factory = isolate()->factory();
Handle<String> name = factory->NewStringFromStaticChars("var_name");
builder.LoadGlobal(name, 1, TypeofMode::NOT_INSIDE_TYPEOF)
.LoadGlobal(name, 1, TypeofMode::INSIDE_TYPEOF)
......@@ -331,6 +343,9 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
// Insert entry for illegal bytecode as this is never willingly emitted.
scorecard[Bytecodes::ToByte(Bytecode::kIllegal)] = 1;
// Insert entry for nop bytecode as this often gets optimized out.
scorecard[Bytecodes::ToByte(Bytecode::kNop)] = 1;
// Check return occurs at the end and only once in the BytecodeArray.
CHECK_EQ(final_bytecode, Bytecode::kReturn);
CHECK_EQ(scorecard[Bytecodes::ToByte(final_bytecode)], 1);
......@@ -462,7 +477,7 @@ TEST_F(BytecodeArrayBuilderTest, ForwardJumps) {
.BinaryOperation(Token::Value::ADD, reg)
.JumpIfFalse(&far4);
for (int i = 0; i < kFarJumpDistance - 18; i++) {
builder.LoadUndefined();
builder.Debugger();
}
builder.Bind(&far0).Bind(&far1).Bind(&far2).Bind(&far3).Bind(&far4);
builder.Return();
......@@ -503,7 +518,6 @@ TEST_F(BytecodeArrayBuilderTest, ForwardJumps) {
CHECK_EQ(iterator.GetImmediateOperand(0), 2);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpConstant);
CHECK_EQ(*iterator.GetConstantForIndexOperand(0),
Smi::FromInt(kFarJumpDistance));
......@@ -569,7 +583,7 @@ TEST_F(BytecodeArrayBuilderTest, BackwardJumps) {
// Add padding to force wide backwards jumps.
for (int i = 0; i < 256; i++) {
builder.LoadTrue();
builder.Debugger();
}
builder.BinaryOperation(Token::Value::ADD, reg).JumpIfFalse(&label4);
......@@ -616,7 +630,7 @@ TEST_F(BytecodeArrayBuilderTest, BackwardJumps) {
}
// Check padding to force wide backwards jumps.
for (int i = 0; i < 256; i++) {
CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaTrue);
CHECK_EQ(iterator.current_bytecode(), Bytecode::kDebugger);
iterator.Advance();
}
// Ignore binary operation.
......@@ -707,85 +721,6 @@ TEST_F(BytecodeArrayBuilderTest, LabelAddressReuse) {
CHECK(iterator.done());
}
TEST_F(BytecodeArrayBuilderTest, OperandScales) {
CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(OperandSize::kByte),
OperandScale::kSingle);
CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(OperandSize::kShort),
OperandScale::kDouble);
CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(OperandSize::kQuad),
OperandScale::kQuadruple);
CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(
OperandSize::kShort, OperandSize::kShort, OperandSize::kShort,
OperandSize::kShort),
OperandScale::kDouble);
CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(
OperandSize::kQuad, OperandSize::kShort, OperandSize::kShort,
OperandSize::kShort),
OperandScale::kQuadruple);
CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(
OperandSize::kShort, OperandSize::kQuad, OperandSize::kShort,
OperandSize::kShort),
OperandScale::kQuadruple);
CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(
OperandSize::kShort, OperandSize::kShort, OperandSize::kQuad,
OperandSize::kShort),
OperandScale::kQuadruple);
CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(
OperandSize::kShort, OperandSize::kShort, OperandSize::kShort,
OperandSize::kQuad),
OperandScale::kQuadruple);
}
TEST_F(BytecodeArrayBuilderTest, SizesForSignOperands) {
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(0) == OperandSize::kByte);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMaxInt8) ==
OperandSize::kByte);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMinInt8) ==
OperandSize::kByte);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMaxInt8 + 1) ==
OperandSize::kShort);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMinInt8 - 1) ==
OperandSize::kShort);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMaxInt16) ==
OperandSize::kShort);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMinInt16) ==
OperandSize::kShort);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMaxInt16 + 1) ==
OperandSize::kQuad);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMinInt16 - 1) ==
OperandSize::kQuad);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMaxInt) ==
OperandSize::kQuad);
CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMinInt) ==
OperandSize::kQuad);
}
TEST_F(BytecodeArrayBuilderTest, SizesForUnsignOperands) {
// int overloads
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(0) == OperandSize::kByte);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(kMaxUInt8) ==
OperandSize::kByte);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(kMaxUInt8 + 1) ==
OperandSize::kShort);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(kMaxUInt16) ==
OperandSize::kShort);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(kMaxUInt16 + 1) ==
OperandSize::kQuad);
// size_t overloads
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(static_cast<size_t>(0)) ==
OperandSize::kByte);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(
static_cast<size_t>(kMaxUInt8)) == OperandSize::kByte);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(
static_cast<size_t>(kMaxUInt8 + 1)) == OperandSize::kShort);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(
static_cast<size_t>(kMaxUInt16)) == OperandSize::kShort);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(
static_cast<size_t>(kMaxUInt16 + 1)) == OperandSize::kQuad);
CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(
static_cast<size_t>(kMaxUInt32)) == OperandSize::kQuad);
}
} // namespace interpreter
} // namespace internal
} // namespace v8
......@@ -37,11 +37,17 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
int feedback_slot = 97;
builder.LoadLiteral(heap_num_0)
.StoreAccumulatorInRegister(reg_0)
.LoadLiteral(heap_num_1)
.StoreAccumulatorInRegister(reg_0)
.LoadLiteral(zero)
.StoreAccumulatorInRegister(reg_0)
.LoadLiteral(smi_0)
.StoreAccumulatorInRegister(reg_0)
.LoadLiteral(smi_1)
.StoreAccumulatorInRegister(reg_1)
.LoadAccumulatorWithRegister(reg_0)
.StoreAccumulatorInRegister(reg_1)
.LoadNamedProperty(reg_1, name, feedback_slot)
.StoreAccumulatorInRegister(param)
.CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, 1, reg_0)
......@@ -64,6 +70,15 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kStar);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
CHECK_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
CHECK_EQ(iterator.GetRegisterOperandRange(0), 1);
CHECK(!iterator.done());
offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
......@@ -72,6 +87,15 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kStar);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
CHECK_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
CHECK_EQ(iterator.GetRegisterOperandRange(0), 1);
CHECK(!iterator.done());
offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaZero);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
......@@ -79,6 +103,15 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
offset += Bytecodes::Size(Bytecode::kLdaZero, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kStar);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
CHECK_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
CHECK_EQ(iterator.GetRegisterOperandRange(0), 1);
CHECK(!iterator.done());
offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
......@@ -87,6 +120,15 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kStar);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
CHECK_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
CHECK_EQ(iterator.GetRegisterOperandRange(0), 1);
CHECK(!iterator.done());
offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kQuadruple);
......@@ -96,6 +138,15 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
kPrefixByteSize;
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kStar);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
CHECK_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
CHECK_EQ(iterator.GetRegisterOperandRange(0), 1);
CHECK(!iterator.done());
offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdar);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
......@@ -104,6 +155,15 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
offset += Bytecodes::Size(Bytecode::kLdar, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kStar);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
CHECK_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
CHECK_EQ(iterator.GetRegisterOperandRange(0), 1);
CHECK(!iterator.done());
offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
iterator.Advance();
CHECK_EQ(iterator.current_bytecode(), Bytecode::kLoadIC);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
......
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/v8.h"
#include "src/interpreter/bytecode-array-writer.h"
#include "src/interpreter/source-position-table.h"
#include "src/isolate.h"
#include "src/utils.h"
#include "test/unittests/interpreter/bytecode-utils.h"
#include "test/unittests/test-utils.h"
namespace v8 {
namespace internal {
namespace interpreter {
// Test fixture that connects a BytecodeArrayWriter (the unit under test) to
// a SourcePositionTableBuilder, so tests can emit bytecodes and afterwards
// inspect both the written bytes and the recorded source positions.
class BytecodeArrayWriterUnittest : public TestWithIsolateAndZone {
 public:
  BytecodeArrayWriterUnittest()
      : source_position_builder_(isolate(), zone()),
        bytecode_array_writer_(zone(), &source_position_builder_) {}
  ~BytecodeArrayWriterUnittest() override {}
  // Helper overloads: each wraps |bytecode| (and any operands) in a
  // BytecodeNode, optionally attaches |info| as its source position, and
  // hands the node to the writer. One overload per operand count.
  void Write(BytecodeNode* node, const BytecodeSourceInfo& info);
  void Write(Bytecode bytecode,
             const BytecodeSourceInfo& info = BytecodeSourceInfo());
  void Write(Bytecode bytecode, uint32_t operand0, OperandScale operand_scale,
             const BytecodeSourceInfo& info = BytecodeSourceInfo());
  void Write(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
             OperandScale operand_scale,
             const BytecodeSourceInfo& info = BytecodeSourceInfo());
  void Write(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
             uint32_t operand2, OperandScale operand_scale,
             const BytecodeSourceInfo& info = BytecodeSourceInfo());
  void Write(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
             uint32_t operand2, uint32_t operand3, OperandScale operand_scale,
             const BytecodeSourceInfo& info = BytecodeSourceInfo());
  SourcePositionTableBuilder* source_position_builder() {
    return &source_position_builder_;
  }
  BytecodeArrayWriter* writer() { return &bytecode_array_writer_; }
 private:
  SourcePositionTableBuilder source_position_builder_;
  BytecodeArrayWriter bytecode_array_writer_;
};
// Attaches |info| to |node| when it carries a valid source position, then
// forwards the node to the BytecodeArrayWriter under test.
void BytecodeArrayWriterUnittest::Write(BytecodeNode* node,
                                        const BytecodeSourceInfo& info) {
  const bool have_position = info.is_valid();
  if (have_position) {
    node->source_info().Update(info);
  }
  writer()->Write(node);
}
// Emits an operand-free bytecode, optionally tagged with |info|.
void BytecodeArrayWriterUnittest::Write(Bytecode bytecode,
                                        const BytecodeSourceInfo& info) {
  BytecodeNode operandless_node(bytecode);
  Write(&operandless_node, info);
}
// Emits a one-operand bytecode at |operand_scale|, optionally tagged with
// |info|.
void BytecodeArrayWriterUnittest::Write(Bytecode bytecode, uint32_t operand0,
                                        OperandScale operand_scale,
                                        const BytecodeSourceInfo& info) {
  BytecodeNode unary_node(bytecode, operand0, operand_scale);
  Write(&unary_node, info);
}
// Emits a two-operand bytecode at |operand_scale|, optionally tagged with
// |info|.
void BytecodeArrayWriterUnittest::Write(Bytecode bytecode, uint32_t operand0,
                                        uint32_t operand1,
                                        OperandScale operand_scale,
                                        const BytecodeSourceInfo& info) {
  BytecodeNode binary_node(bytecode, operand0, operand1, operand_scale);
  Write(&binary_node, info);
}
// Emits a three-operand bytecode at |operand_scale|, optionally tagged with
// |info|.
void BytecodeArrayWriterUnittest::Write(Bytecode bytecode, uint32_t operand0,
                                        uint32_t operand1, uint32_t operand2,
                                        OperandScale operand_scale,
                                        const BytecodeSourceInfo& info) {
  BytecodeNode ternary_node(bytecode, operand0, operand1, operand2,
                            operand_scale);
  Write(&ternary_node, info);
}
// Emits a four-operand bytecode at |operand_scale|, optionally tagged with
// |info|.
void BytecodeArrayWriterUnittest::Write(Bytecode bytecode, uint32_t operand0,
                                        uint32_t operand1, uint32_t operand2,
                                        uint32_t operand3,
                                        OperandScale operand_scale,
                                        const BytecodeSourceInfo& info) {
  BytecodeNode quaternary_node(bytecode, operand0, operand1, operand2,
                               operand3, operand_scale);
  Write(&quaternary_node, info);
}
// Writes a short four-bytecode sequence (one with a wide operand) and checks
// the emitted bytes, the maximum-frame-size tracking, and the source position
// table entries. The {offset, is_statement} initializer lists become
// BytecodeSourceInfo values via the Write() helpers' default argument.
TEST_F(BytecodeArrayWriterUnittest, SimpleExample) {
  CHECK_EQ(writer()->bytecodes()->size(), 0);
  // StackCheck: 1 byte, expression position at source offset 10.
  Write(Bytecode::kStackCheck, {10, false});
  CHECK_EQ(writer()->bytecodes()->size(), 1);
  CHECK_EQ(writer()->GetMaximumFrameSizeUsed(), 0);
  // LdaSmi + one byte operand: 2 more bytes, statement position at 55.
  Write(Bytecode::kLdaSmi, 0xff, OperandScale::kSingle, {55, true});
  CHECK_EQ(writer()->bytecodes()->size(), 3);
  CHECK_EQ(writer()->GetMaximumFrameSizeUsed(), 0);
  // Double-scaled Ldar emits a Wide prefix + bytecode + 16-bit register
  // operand (4 bytes); touching register 1 raises the frame size to two
  // pointer-sized slots.
  Write(Bytecode::kLdar, Register(1).ToOperand(), OperandScale::kDouble);
  CHECK_EQ(writer()->bytecodes()->size(), 7);
  CHECK_EQ(writer()->GetMaximumFrameSizeUsed(), 2 * kPointerSize);
  Write(Bytecode::kReturn, {70, true});
  CHECK_EQ(writer()->bytecodes()->size(), 8);
  CHECK_EQ(writer()->GetMaximumFrameSizeUsed(), 2 * kPointerSize);
  // Expected byte-for-byte encoding of the sequence written above.
  static const uint8_t bytes[] = {B(StackCheck), B(LdaSmi), U8(0xff), B(Wide),
                                  B(Ldar), R16(1), B(Return)};
  CHECK_EQ(writer()->bytecodes()->size(), arraysize(bytes));
  for (size_t i = 0; i < arraysize(bytes); ++i) {
    CHECK_EQ(writer()->bytecodes()->at(i), bytes[i]);
  }
  // Flushing reports the current offset but must not alter the bytes.
  CHECK_EQ(writer()->FlushForOffset(), arraysize(bytes));
  writer()->FlushBasicBlock();
  CHECK_EQ(writer()->bytecodes()->size(), arraysize(bytes));
  // Only the Writes that supplied a BytecodeSourceInfo appear in the table.
  PositionTableEntry expected_positions[] = {
      {0, 10, false}, {1, 55, true}, {7, 70, true}};
  Handle<ByteArray> source_positions =
      source_position_builder()->ToSourcePositionTable();
  SourcePositionTableIterator source_iterator(*source_positions);
  for (size_t i = 0; i < arraysize(expected_positions); ++i) {
    const PositionTableEntry& expected = expected_positions[i];
    CHECK_EQ(source_iterator.bytecode_offset(), expected.bytecode_offset);
    CHECK_EQ(source_iterator.source_position(), expected.source_position);
    CHECK_EQ(source_iterator.is_statement(), expected.is_statement);
    source_iterator.Advance();
  }
  CHECK(source_iterator.done());
}
// Writes a realistic for-in loop bytecode sequence and verifies the full
// byte encoding, the incremental growth of the maximum frame size as higher
// registers are touched, and the source position table.
TEST_F(BytecodeArrayWriterUnittest, ComplexExample) {
  // Expected bytes; the leading comments give offset and source position
  // ("S>" statement / "E>" expression) for cross-reference with the Writes
  // below.
  static const uint8_t expected_bytes[] = {
      // clang-format off
      /*  0 30 E> */ B(StackCheck),
      /*  1 42 S> */ B(LdaConstant), U8(0),
      /*  3 42 E> */ B(Star), R8(1),
      /*  5 68 S> */ B(JumpIfUndefined), U8(38),
      /*  7       */ B(JumpIfNull), U8(36),
      /*  9       */ B(ToObject),
      /* 10       */ B(Star), R8(3),
      /* 12       */ B(ForInPrepare), R8(4),
      /* 14       */ B(LdaZero),
      /* 15       */ B(Star), R8(7),
      /* 17 63 S> */ B(ForInDone), R8(7), R8(6),
      /* 20       */ B(JumpIfTrue), U8(23),
      /* 22       */ B(ForInNext), R8(3), R8(7), R8(4), U8(1),
      /* 27       */ B(JumpIfUndefined), U8(10),
      /* 29       */ B(Star), R8(0),
      /* 31 54 E> */ B(StackCheck),
      /* 32       */ B(Ldar), R8(0),
      /* 34       */ B(Star), R8(2),
      /* 36 85 S> */ B(Return),
      /* 37       */ B(ForInStep), R8(7),
      /* 39       */ B(Star), R8(7),
      /* 41       */ B(Jump), U8(-24),
      /* 43       */ B(LdaUndefined),
      /* 44 85 S> */ B(Return),
      // clang-format on
  };
  static const PositionTableEntry expected_positions[] = {
      {0, 30, false}, {1, 42, true},   {3, 42, false},  {5, 68, true},
      {17, 63, true}, {31, 54, false}, {36, 85, true},  {44, 85, true}};
  // Shorthand: convert a register index to its raw operand value.
  #define R(i) static_cast<uint32_t>(Register(i).ToOperand())
  Write(Bytecode::kStackCheck, {30, false});
  Write(Bytecode::kLdaConstant, U8(0), OperandScale::kSingle, {42, true});
  CHECK_EQ(writer()->GetMaximumFrameSizeUsed(), 0 * kPointerSize);
  // Each write to a new highest register should bump the reported frame
  // size to (index + 1) pointer-sized slots.
  Write(Bytecode::kStar, R(1), OperandScale::kSingle, {42, false});
  CHECK_EQ(writer()->GetMaximumFrameSizeUsed(), 2 * kPointerSize);
  Write(Bytecode::kJumpIfUndefined, U8(38), OperandScale::kSingle, {68, true});
  Write(Bytecode::kJumpIfNull, U8(36), OperandScale::kSingle);
  Write(Bytecode::kToObject);
  CHECK_EQ(writer()->GetMaximumFrameSizeUsed(), 2 * kPointerSize);
  Write(Bytecode::kStar, R(3), OperandScale::kSingle);
  CHECK_EQ(writer()->GetMaximumFrameSizeUsed(), 4 * kPointerSize);
  // ForInPrepare writes a register triple starting at R(4); the frame must
  // cover through register 6, hence 7 slots.
  Write(Bytecode::kForInPrepare, R(4), OperandScale::kSingle);
  CHECK_EQ(writer()->GetMaximumFrameSizeUsed(), 7 * kPointerSize);
  Write(Bytecode::kLdaZero);
  CHECK_EQ(writer()->GetMaximumFrameSizeUsed(), 7 * kPointerSize);
  Write(Bytecode::kStar, R(7), OperandScale::kSingle);
  CHECK_EQ(writer()->GetMaximumFrameSizeUsed(), 8 * kPointerSize);
  Write(Bytecode::kForInDone, R(7), R(6), OperandScale::kSingle, {63, true});
  CHECK_EQ(writer()->GetMaximumFrameSizeUsed(), 8 * kPointerSize);
  Write(Bytecode::kJumpIfTrue, U8(23), OperandScale::kSingle);
  Write(Bytecode::kForInNext, R(3), R(7), R(4), U8(1), OperandScale::kSingle);
  Write(Bytecode::kJumpIfUndefined, U8(10), OperandScale::kSingle);
  Write(Bytecode::kStar, R(0), OperandScale::kSingle);
  Write(Bytecode::kStackCheck, {54, false});
  Write(Bytecode::kLdar, R(0), OperandScale::kSingle);
  Write(Bytecode::kStar, R(2), OperandScale::kSingle);
  Write(Bytecode::kReturn, {85, true});
  Write(Bytecode::kForInStep, R(7), OperandScale::kSingle);
  Write(Bytecode::kStar, R(7), OperandScale::kSingle);
  Write(Bytecode::kJump, U8(-24), OperandScale::kSingle);
  Write(Bytecode::kLdaUndefined);
  Write(Bytecode::kReturn, {85, true});
  CHECK_EQ(writer()->GetMaximumFrameSizeUsed(), 8 * kPointerSize);
  #undef R
  CHECK_EQ(writer()->bytecodes()->size(), arraysize(expected_bytes));
  for (size_t i = 0; i < arraysize(expected_bytes); ++i) {
    CHECK_EQ(static_cast<int>(writer()->bytecodes()->at(i)),
             static_cast<int>(expected_bytes[i]));
  }
  Handle<ByteArray> source_positions =
      source_position_builder()->ToSourcePositionTable();
  SourcePositionTableIterator source_iterator(*source_positions);
  for (size_t i = 0; i < arraysize(expected_positions); ++i) {
    const PositionTableEntry& expected = expected_positions[i];
    CHECK_EQ(source_iterator.bytecode_offset(), expected.bytecode_offset);
    CHECK_EQ(source_iterator.source_position(), expected.source_position);
    CHECK_EQ(source_iterator.is_statement(), expected.is_statement);
    source_iterator.Advance();
  }
  CHECK(source_iterator.done());
}
} // namespace interpreter
} // namespace internal
} // namespace v8
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/v8.h"
#include "src/factory.h"
#include "src/interpreter/bytecode-peephole-optimizer.h"
#include "src/interpreter/constant-array-builder.h"
#include "src/objects-inl.h"
#include "src/objects.h"
#include "test/unittests/test-utils.h"
namespace v8 {
namespace internal {
namespace interpreter {
// Test fixture that doubles as the pipeline stage downstream of the
// peephole optimizer, so each test can observe exactly what the optimizer
// forwards: how many flushes occurred, how many nodes were written, and the
// last node written.
class BytecodePeepholeOptimizerTest : public BytecodePipelineStage,
                                      public TestWithIsolateAndZone {
 public:
  BytecodePeepholeOptimizerTest()
      : constant_array_builder_(isolate(), zone()),
        peephole_optimizer_(&constant_array_builder_, this) {}
  ~BytecodePeepholeOptimizerTest() override {}

  // BytecodePipelineStage implementation: record each call for inspection.
  // (Fixed: removed a stray semicolon after this member-function body.)
  size_t FlushForOffset() override {
    flush_for_offset_count_++;
    return 0;
  }
  void FlushBasicBlock() override { flush_basic_block_count_++; }
  void Write(BytecodeNode* node) override {
    write_count_++;
    last_written_.Clone(node);
  }

  // Accessors for the object under test and its constant pool.
  BytecodePeepholeOptimizer* optimizer() { return &peephole_optimizer_; }
  ConstantArrayBuilder* constant_array() { return &constant_array_builder_; }

  // Observations accumulated by the downstream-stage callbacks above.
  int flush_for_offset_count() const { return flush_for_offset_count_; }
  int flush_basic_block_count() const { return flush_basic_block_count_; }
  int write_count() const { return write_count_; }
  const BytecodeNode& last_written() const { return last_written_; }

 private:
  ConstantArrayBuilder constant_array_builder_;
  BytecodePeepholeOptimizer peephole_optimizer_;
  int flush_for_offset_count_ = 0;
  int flush_basic_block_count_ = 0;
  int write_count_ = 0;
  BytecodeNode last_written_;
};
// Sanity tests.
// FlushForOffset() is forwarded to the next stage and reports zero pending
// bytes when the optimizer holds no bytecode.
TEST_F(BytecodePeepholeOptimizerTest, FlushForOffsetPassThrough) {
  CHECK_EQ(flush_for_offset_count(), 0);
  CHECK_EQ(optimizer()->FlushForOffset(), 0);
  CHECK_EQ(flush_for_offset_count(), 1);
}
// A buffered bytecode contributes its encoded size to the flush result even
// though it has not yet been written to the next stage.
TEST_F(BytecodePeepholeOptimizerTest, FlushForOffsetRightSize) {
  BytecodeNode node(Bytecode::kAdd, Register(0).ToOperand(),
                    OperandScale::kQuadruple);
  optimizer()->Write(&node);
  // The node is still held by the optimizer; nothing written downstream yet.
  CHECK_EQ(optimizer()->FlushForOffset(), node.Size());
  CHECK_EQ(flush_for_offset_count(), 1);
  CHECK_EQ(write_count(), 0);
}
// A pending Nop without source information can be dropped entirely, so it
// contributes no bytes on flush.
TEST_F(BytecodePeepholeOptimizerTest, FlushForOffsetNop) {
  BytecodeNode node(Bytecode::kNop);
  optimizer()->Write(&node);
  CHECK_EQ(optimizer()->FlushForOffset(), 0);
  CHECK_EQ(flush_for_offset_count(), 1);
  CHECK_EQ(write_count(), 0);
}
// A Nop carrying only an expression (non-statement) position is still
// droppable and contributes no bytes on flush.
TEST_F(BytecodePeepholeOptimizerTest, FlushForOffsetNopExpression) {
  BytecodeNode node(Bytecode::kNop);
  node.source_info().Update({3, false});
  optimizer()->Write(&node);
  CHECK_EQ(optimizer()->FlushForOffset(), 0);
  CHECK_EQ(flush_for_offset_count(), 1);
  CHECK_EQ(write_count(), 0);
}
// A Nop carrying a statement position must be preserved (for the debugger),
// so its size counts towards the flushed offset.
TEST_F(BytecodePeepholeOptimizerTest, FlushForOffsetNopStatement) {
  BytecodeNode node(Bytecode::kNop);
  node.source_info().Update({3, true});
  optimizer()->Write(&node);
  CHECK_EQ(optimizer()->FlushForOffset(), node.Size());
  CHECK_EQ(flush_for_offset_count(), 1);
  CHECK_EQ(write_count(), 0);
}
// FlushBasicBlock() is forwarded to the next stage; with nothing buffered
// no bytecode is written.
TEST_F(BytecodePeepholeOptimizerTest, FlushBasicBlockPassThrough) {
  CHECK_EQ(flush_basic_block_count(), 0);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(flush_basic_block_count(), 1);
  CHECK_EQ(write_count(), 0);
}
// A single buffered bytecode is held back until the basic block is flushed,
// then emitted unchanged.
TEST_F(BytecodePeepholeOptimizerTest, WriteOneFlushBasicBlock) {
  BytecodeNode node(Bytecode::kAdd, Register(0).ToOperand(),
                    OperandScale::kQuadruple);
  optimizer()->Write(&node);
  CHECK_EQ(write_count(), 0);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 1);
  CHECK_EQ(node, last_written());
}
// Tests covering BytecodePeepholeOptimizer::UpdateCurrentBytecode().
// LdaNull does not load a boolean, so the following JumpIfToBooleanTrue
// must be kept as-is (the ToBoolean conversion is still required).
TEST_F(BytecodePeepholeOptimizerTest, KeepJumpIfToBooleanTrue) {
  BytecodeNode first(Bytecode::kLdaNull);
  BytecodeNode second(Bytecode::kJumpIfToBooleanTrue, 3, OperandScale::kSingle);
  optimizer()->Write(&first);
  CHECK_EQ(write_count(), 0);
  optimizer()->Write(&second);
  CHECK_EQ(write_count(), 1);
  CHECK_EQ(last_written(), first);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 2);
  CHECK_EQ(last_written(), second);
}
// LdaTrue leaves a boolean in the accumulator, so JumpIfToBooleanTrue is
// strength-reduced to plain JumpIfTrue with the same jump operand.
TEST_F(BytecodePeepholeOptimizerTest, ElideJumpIfToBooleanTrue) {
  BytecodeNode first(Bytecode::kLdaTrue);
  BytecodeNode second(Bytecode::kJumpIfToBooleanTrue, 3, OperandScale::kSingle);
  optimizer()->Write(&first);
  CHECK_EQ(write_count(), 0);
  optimizer()->Write(&second);
  CHECK_EQ(write_count(), 1);
  CHECK_EQ(last_written(), first);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 2);
  CHECK_EQ(last_written().bytecode(), Bytecode::kJumpIfTrue);
  CHECK_EQ(last_written().operand(0), second.operand(0));
}
// Tests covering BytecodePeepholeOptimizer::CanElideCurrent().
// Loading two *different* registers back-to-back: the second Ldar is not
// redundant and must be emitted.
TEST_F(BytecodePeepholeOptimizerTest, LdarRxLdarRy) {
  BytecodeNode first(Bytecode::kLdar, Register(0).ToOperand(),
                     OperandScale::kSingle);
  BytecodeNode second(Bytecode::kLdar, Register(1).ToOperand(),
                      OperandScale::kSingle);
  optimizer()->Write(&first);
  optimizer()->FlushForOffset();  // Prevent CanElideLast removing |first|.
  CHECK_EQ(write_count(), 0);
  optimizer()->Write(&second);
  CHECK_EQ(write_count(), 1);
  CHECK_EQ(last_written(), first);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 2);
  CHECK_EQ(last_written(), second);
}
// Loading the *same* register twice: the second Ldar is a no-op and is
// elided (write count stays at 1 after the basic block flush).
TEST_F(BytecodePeepholeOptimizerTest, LdarRxLdarRx) {
  BytecodeNode first(Bytecode::kLdar, Register(0).ToOperand(),
                     OperandScale::kSingle);
  BytecodeNode second(Bytecode::kLdar, Register(0).ToOperand(),
                      OperandScale::kSingle);
  optimizer()->Write(&first);
  CHECK_EQ(write_count(), 0);
  optimizer()->FlushForOffset();  // Prevent CanElideLast removing |first|.
  optimizer()->Write(&second);
  CHECK_EQ(write_count(), 1);
  CHECK_EQ(last_written(), first);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 1);
}
// A redundant Ldar that carries a statement position cannot simply vanish:
// it is replaced by a Nop that keeps the source position alive.
TEST_F(BytecodePeepholeOptimizerTest, LdarRxLdarRxStatement) {
  BytecodeNode first(Bytecode::kLdar, Register(0).ToOperand(),
                     OperandScale::kSingle);
  BytecodeNode second(Bytecode::kLdar, Register(0).ToOperand(),
                      OperandScale::kSingle);
  second.source_info().Update({0, true});
  optimizer()->Write(&first);
  CHECK_EQ(write_count(), 0);
  optimizer()->FlushForOffset();  // Prevent CanElideLast removing |first|.
  optimizer()->Write(&second);
  CHECK_EQ(write_count(), 1);
  CHECK_EQ(last_written(), first);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 2);
  CHECK_EQ(last_written().bytecode(), Bytecode::kNop);
  CHECK_EQ(last_written().source_info(), second.source_info());
}
// When a redundant Ldar with a statement position is followed by another
// bytecode, no Nop is needed: the position migrates onto that bytecode.
TEST_F(BytecodePeepholeOptimizerTest, LdarRxLdarRxStatementStarRy) {
  BytecodeNode first(Bytecode::kLdar, Register(0).ToOperand(),
                     OperandScale::kSingle);
  BytecodeNode second(Bytecode::kLdar, Register(0).ToOperand(),
                      OperandScale::kSingle);
  BytecodeNode third(Bytecode::kStar, Register(3).ToOperand(),
                     OperandScale::kSingle);
  second.source_info().Update({0, true});
  optimizer()->Write(&first);
  CHECK_EQ(write_count(), 0);
  optimizer()->FlushForOffset();  // Prevent CanElideLast removing |first|.
  optimizer()->Write(&second);
  CHECK_EQ(write_count(), 1);
  CHECK_EQ(last_written(), first);
  optimizer()->Write(&third);
  CHECK_EQ(write_count(), 1);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 2);
  // Source position should move |second| to |third| when |second| is elided.
  third.source_info().Update(second.source_info());
  CHECK_EQ(last_written(), third);
}
// Ldar loads an arbitrary value, so a following ToName cannot be elided.
TEST_F(BytecodePeepholeOptimizerTest, LdarToName) {
  BytecodeNode first(Bytecode::kLdar, Register(0).ToOperand(),
                     OperandScale::kSingle);
  BytecodeNode second(Bytecode::kToName);
  optimizer()->Write(&first);
  CHECK_EQ(write_count(), 0);
  optimizer()->Write(&second);
  CHECK_EQ(write_count(), 1);
  CHECK_EQ(last_written(), first);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 2);
  CHECK_EQ(last_written(), second);
}
// The result of ToName is already a name, so the second ToName is elided
// (only one write occurs across the whole block).
TEST_F(BytecodePeepholeOptimizerTest, ToNameToName) {
  BytecodeNode first(Bytecode::kToName);
  BytecodeNode second(Bytecode::kToName);
  optimizer()->Write(&first);
  CHECK_EQ(write_count(), 0);
  optimizer()->Write(&second);
  CHECK_EQ(write_count(), 1);
  CHECK_EQ(last_written(), first);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 1);
}
// TypeOf produces a string, which is already a valid name, so the
// following ToName is elided.
TEST_F(BytecodePeepholeOptimizerTest, TypeOfToName) {
  BytecodeNode first(Bytecode::kTypeOf);
  BytecodeNode second(Bytecode::kToName);
  optimizer()->Write(&first);
  CHECK_EQ(write_count(), 0);
  optimizer()->Write(&second);
  CHECK_EQ(write_count(), 1);
  CHECK_EQ(last_written(), first);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 1);
}
// Loading a string constant: the constant pool tells the optimizer the
// value is already a name, so the following ToName is elided.
TEST_F(BytecodePeepholeOptimizerTest, LdaConstantStringToName) {
  Handle<Object> word =
      isolate()->factory()->NewStringFromStaticChars("optimizing");
  size_t index = constant_array()->Insert(word);
  BytecodeNode first(Bytecode::kLdaConstant, static_cast<uint32_t>(index),
                     OperandScale::kSingle);
  BytecodeNode second(Bytecode::kToName);
  optimizer()->Write(&first);
  CHECK_EQ(write_count(), 0);
  optimizer()->Write(&second);
  CHECK_EQ(write_count(), 1);
  CHECK_EQ(last_written(), first);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 1);
}
// Loading a number constant: a number is not a name, so the following
// ToName must be kept and is written out on flush.
TEST_F(BytecodePeepholeOptimizerTest, LdaConstantNumberToName) {
  Handle<Object> word = isolate()->factory()->NewNumber(0.380);
  size_t index = constant_array()->Insert(word);
  BytecodeNode first(Bytecode::kLdaConstant, static_cast<uint32_t>(index),
                     OperandScale::kSingle);
  BytecodeNode second(Bytecode::kToName);
  optimizer()->Write(&first);
  CHECK_EQ(write_count(), 0);
  optimizer()->Write(&second);
  CHECK_EQ(write_count(), 1);
  CHECK_EQ(last_written(), first);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 2);
  CHECK_EQ(last_written(), second);
}
// Tests covering BytecodePeepholeOptimizer::CanElideLast().
// FlushForOffset() pins the buffered LdaTrue (its offset has been
// observed), so it cannot be discarded even though LdaFalse overwrites it.
TEST_F(BytecodePeepholeOptimizerTest, LdaTrueLdaFalseNotDiscardable) {
  BytecodeNode first(Bytecode::kLdaTrue);
  BytecodeNode second(Bytecode::kLdaFalse);
  optimizer()->Write(&first);
  optimizer()->FlushForOffset();  // Prevent discarding of |first|.
  CHECK_EQ(write_count(), 0);
  optimizer()->Write(&second);
  CHECK_EQ(write_count(), 1);
  CHECK_EQ(last_written(), first);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 2);
  CHECK_EQ(last_written(), second);
}
// An accumulator load immediately overwritten by another load is dead code
// and is discarded: only LdaFalse survives.
TEST_F(BytecodePeepholeOptimizerTest, LdaTrueLdaFalse) {
  BytecodeNode first(Bytecode::kLdaTrue);
  BytecodeNode second(Bytecode::kLdaFalse);
  optimizer()->Write(&first);
  CHECK_EQ(write_count(), 0);
  optimizer()->Write(&second);
  CHECK_EQ(write_count(), 0);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 1);
  CHECK_EQ(last_written(), second);
}
// When the discarded load carried a source position, that position is
// transferred onto the surviving bytecode.
TEST_F(BytecodePeepholeOptimizerTest, LdaTrueStatementLdaFalse) {
  BytecodeNode first(Bytecode::kLdaTrue);
  first.source_info().Update({3, false});
  BytecodeNode second(Bytecode::kLdaFalse);
  optimizer()->Write(&first);
  CHECK_EQ(write_count(), 0);
  optimizer()->Write(&second);
  CHECK_EQ(write_count(), 0);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 1);
  second.source_info().Update(first.source_info());
  CHECK_EQ(last_written(), second);
}
// A Nop preceding a StackCheck serves no purpose and is dropped.
TEST_F(BytecodePeepholeOptimizerTest, NopStackCheck) {
  BytecodeNode first(Bytecode::kNop);
  BytecodeNode second(Bytecode::kStackCheck);
  optimizer()->Write(&first);
  CHECK_EQ(write_count(), 0);
  optimizer()->Write(&second);
  CHECK_EQ(write_count(), 0);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 1);
  CHECK_EQ(last_written(), second);
}
// A dropped Nop's source position is transferred to the StackCheck that
// replaces it in the output stream.
TEST_F(BytecodePeepholeOptimizerTest, NopStatementStackCheck) {
  BytecodeNode first(Bytecode::kNop);
  first.source_info().Update({3, false});
  BytecodeNode second(Bytecode::kStackCheck);
  optimizer()->Write(&first);
  CHECK_EQ(write_count(), 0);
  optimizer()->Write(&second);
  CHECK_EQ(write_count(), 0);
  optimizer()->FlushBasicBlock();
  CHECK_EQ(write_count(), 1);
  second.source_info().Update(first.source_info());
  CHECK_EQ(last_written(), second);
}
} // namespace interpreter
} // namespace internal
} // namespace v8
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/v8.h"
#include "src/interpreter/bytecode-pipeline.h"
#include "src/interpreter/bytecode-register-allocator.h"
#include "src/isolate.h"
#include "test/unittests/test-utils.h"
namespace v8 {
namespace internal {
namespace interpreter {
using BytecodeNodeTest = TestWithIsolateAndZone;
// Exercises BytecodeSourceInfo: construction, invalidation, equality, and
// the Update() merge rules demonstrated below — an expression position never
// overwrites a valid entry, a statement position always does, and any
// position fills in an invalidated entry.
TEST(BytecodeSourceInfo, Operations) {
  BytecodeSourceInfo x(0, true);
  CHECK_EQ(x.source_position(), 0);
  CHECK_EQ(x.is_statement(), true);
  CHECK_EQ(x.is_valid(), true);
  x.set_invalid();
  CHECK_EQ(x.is_statement(), false);
  CHECK_EQ(x.is_valid(), false);
  x.Update({1, true});
  BytecodeSourceInfo y(1, true);
  CHECK(x == y);
  CHECK(!(x != y));
  x.set_invalid();
  CHECK(!(x == y));
  CHECK(x != y);
  // Expression update does not displace an existing statement position.
  y.Update({2, false});
  CHECK_EQ(y.source_position(), 1);
  CHECK_EQ(y.is_statement(), true);
  // Statement update replaces the existing position.
  y.Update({2, true});
  CHECK_EQ(y.source_position(), 2);
  CHECK_EQ(y.is_statement(), true);
  // After invalidation, any update (expression or statement) is accepted.
  y.set_invalid();
  y.Update({3, false});
  CHECK_EQ(y.source_position(), 3);
  CHECK_EQ(y.is_statement(), false);
  y.Update({3, true});
  CHECK_EQ(y.source_position(), 3);
  CHECK_EQ(y.is_statement(), true);
}
// A default-constructed node holds the illegal bytecode and no valid
// source information.
TEST_F(BytecodeNodeTest, Constructor0) {
  BytecodeNode default_constructed;
  CHECK_EQ(default_constructed.bytecode(), Bytecode::kIllegal);
  CHECK(!default_constructed.source_info().is_valid());
}
// An operand-free bytecode: zero operands, single scale, one byte in size,
// and no source information attached.
TEST_F(BytecodeNodeTest, Constructor1) {
  BytecodeNode no_operand_node(Bytecode::kLdaZero);
  CHECK_EQ(no_operand_node.bytecode(), Bytecode::kLdaZero);
  CHECK_EQ(no_operand_node.operand_count(), 0);
  CHECK_EQ(no_operand_node.operand_scale(), OperandScale::kSingle);
  CHECK(!no_operand_node.source_info().is_valid());
  CHECK_EQ(no_operand_node.Size(), 1);
}
// A one-operand bytecode at double scale reports its operand back and
// occupies four bytes in total.
TEST_F(BytecodeNodeTest, Constructor2) {
  const uint32_t kOperand0 = 0x11;
  BytecodeNode jump_node(Bytecode::kJumpIfTrue, kOperand0,
                         OperandScale::kDouble);
  CHECK_EQ(jump_node.bytecode(), Bytecode::kJumpIfTrue);
  CHECK_EQ(jump_node.operand_count(), 1);
  CHECK_EQ(jump_node.operand(0), kOperand0);
  CHECK_EQ(jump_node.operand_scale(), OperandScale::kDouble);
  CHECK(!jump_node.source_info().is_valid());
  CHECK_EQ(jump_node.Size(), 4);
}
// Two-operand constructor at quadruple scale: operands and size round-trip.
TEST_F(BytecodeNodeTest, Constructor3) {
  uint32_t operands[] = {0x11, 0x22};
  BytecodeNode node(Bytecode::kLdaGlobal, operands[0], operands[1],
                    OperandScale::kQuadruple);
  CHECK_EQ(node.bytecode(), Bytecode::kLdaGlobal);
  CHECK_EQ(node.operand_count(), 2);
  CHECK_EQ(node.operand(0), operands[0]);
  CHECK_EQ(node.operand(1), operands[1]);
  CHECK_EQ(node.operand_scale(), OperandScale::kQuadruple);
  CHECK(!node.source_info().is_valid());
  CHECK_EQ(node.Size(), 10);
}
// Three-operand constructor at single scale: operands and size round-trip.
TEST_F(BytecodeNodeTest, Constructor4) {
  uint32_t operands[] = {0x11, 0x22, 0x33};
  BytecodeNode node(Bytecode::kLoadIC, operands[0], operands[1], operands[2],
                    OperandScale::kSingle);
  CHECK_EQ(node.operand_count(), 3);
  CHECK_EQ(node.bytecode(), Bytecode::kLoadIC);
  CHECK_EQ(node.operand(0), operands[0]);
  CHECK_EQ(node.operand(1), operands[1]);
  CHECK_EQ(node.operand(2), operands[2]);
  CHECK_EQ(node.operand_scale(), OperandScale::kSingle);
  CHECK(!node.source_info().is_valid());
  CHECK_EQ(node.Size(), 4);
}
// Four-operand constructor at double scale: operands and size round-trip.
TEST_F(BytecodeNodeTest, Constructor5) {
  uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
  BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
                    operands[3], OperandScale::kDouble);
  CHECK_EQ(node.operand_count(), 4);
  CHECK_EQ(node.bytecode(), Bytecode::kForInNext);
  CHECK_EQ(node.operand(0), operands[0]);
  CHECK_EQ(node.operand(1), operands[1]);
  CHECK_EQ(node.operand(2), operands[2]);
  CHECK_EQ(node.operand(3), operands[3]);
  CHECK_EQ(node.operand_scale(), OperandScale::kDouble);
  CHECK(!node.source_info().is_valid());
  CHECK_EQ(node.Size(), 10);
}
// Equality: a node equals itself and a separately constructed node with the
// same bytecode, operands, and scale.
TEST_F(BytecodeNodeTest, Equality) {
  uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
  BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
                    operands[3], OperandScale::kDouble);
  CHECK_EQ(node, node);
  BytecodeNode other(Bytecode::kForInNext, operands[0], operands[1],
                     operands[2], operands[3], OperandScale::kDouble);
  CHECK_EQ(node, other);
}
// Nodes with identical source information remain equal.
TEST_F(BytecodeNodeTest, EqualityWithSourceInfo) {
  uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
  BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
                    operands[3], OperandScale::kDouble);
  node.source_info().Update({3, true});
  CHECK_EQ(node, node);
  BytecodeNode other(Bytecode::kForInNext, operands[0], operands[1],
                     operands[2], operands[3], OperandScale::kDouble);
  other.source_info().Update({3, true});
  CHECK_EQ(node, other);
}
// Source information participates in equality: same bytecode and operands
// but differing source info compare unequal.
TEST_F(BytecodeNodeTest, NoEqualityWithDifferentSourceInfo) {
  uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
  BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
                    operands[3], OperandScale::kDouble);
  node.source_info().Update({3, true});
  BytecodeNode other(Bytecode::kForInNext, operands[0], operands[1],
                     operands[2], operands[3], OperandScale::kDouble);
  CHECK_NE(node, other);
}
// Clone() copies a node so the copy compares equal to the source.
TEST_F(BytecodeNodeTest, Clone) {
  uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
  BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
                    operands[3], OperandScale::kDouble);
  BytecodeNode clone;
  clone.Clone(&node);
  CHECK_EQ(clone, node);
}
// set_bytecode(bytecode) replaces the bytecode, clears the operands, resets
// the scale to single, and preserves the node's source information.
TEST_F(BytecodeNodeTest, SetBytecode0) {
  uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
  BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
                    operands[3], OperandScale::kDouble);
  BytecodeSourceInfo source_info(77, false);
  node.source_info().Update(source_info);
  BytecodeNode clone;
  clone.Clone(&node);
  clone.set_bytecode(Bytecode::kNop);
  CHECK_EQ(clone.bytecode(), Bytecode::kNop);
  CHECK_EQ(clone.operand_count(), 0);
  CHECK_EQ(clone.operand_scale(), OperandScale::kSingle);
  CHECK_EQ(clone.source_info(), source_info);
}
// set_bytecode(bytecode, operand, scale) replaces bytecode, operand, and
// scale in one step while preserving the node's source information.
TEST_F(BytecodeNodeTest, SetBytecode1) {
  uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
  BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
                    operands[3], OperandScale::kDouble);
  BytecodeSourceInfo source_info(77, false);
  node.source_info().Update(source_info);
  BytecodeNode clone;
  clone.Clone(&node);
  clone.set_bytecode(Bytecode::kJump, 0x01aabbcc, OperandScale::kQuadruple);
  CHECK_EQ(clone.bytecode(), Bytecode::kJump);
  CHECK_EQ(clone.operand_count(), 1);
  CHECK_EQ(clone.operand(0), 0x01aabbcc);
  CHECK_EQ(clone.operand_scale(), OperandScale::kQuadruple);
  CHECK_EQ(clone.source_info(), source_info);
}
} // namespace interpreter
} // namespace internal
} // namespace v8
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_UNITTESTS_INTERPRETER_BYTECODE_UTILS_H_
#define V8_UNITTESTS_INTERPRETER_BYTECODE_UTILS_H_

#include "src/frames.h"

// Helpers for spelling out expected bytecode sequences byte-by-byte in
// interpreter unit tests.
//
// EXTRACT(x, n) yields byte |n| (0 = least significant) of integer |x|.
// Fixed: |n| is now parenthesized in the expansion — previously `8 * n`
// would mis-evaluate for a compound argument such as `EXTRACT(x, a + b)`.
#if V8_TARGET_LITTLE_ENDIAN
#define EXTRACT(x, n) static_cast<uint8_t>((x) >> (8 * (n)))
#define U16(i) EXTRACT(i, 0), EXTRACT(i, 1)
#define U32(i) EXTRACT(i, 0), EXTRACT(i, 1), EXTRACT(i, 2), EXTRACT(i, 3)
#elif V8_TARGET_BIG_ENDIAN
#define EXTRACT(x, n) static_cast<uint8_t>((x) >> (8 * (n)))
#define U16(i) EXTRACT(i, 1), EXTRACT(i, 0)
#define U32(i) EXTRACT(i, 3), EXTRACT(i, 2), EXTRACT(i, 1), EXTRACT(i, 0)
#else
#error "Unknown Architecture"
#endif

// Single expected byte, and the numeric value of a bytecode by name.
#define U8(i) static_cast<uint8_t>(i)
#define B(Name) static_cast<uint8_t>(Bytecode::k##Name)

// Frame-relative operand encoding for register |i|, emitted at 8/16/32 bits.
#define REG_OPERAND(i) \
  (InterpreterFrameConstants::kRegisterFileFromFp / kPointerSize - (i))
#define R8(i) static_cast<uint8_t>(REG_OPERAND(i))
#define R16(i) U16(REG_OPERAND(i))
#define R32(i) U32(REG_OPERAND(i))

#endif  // V8_UNITTESTS_INTERPRETER_BYTECODE_UTILS_H_
......@@ -7,6 +7,7 @@
#include "src/v8.h"
#include "src/interpreter/bytecodes.h"
#include "test/unittests/interpreter/bytecode-utils.h"
#include "test/unittests/test-utils.h"
namespace v8 {
......@@ -76,9 +77,13 @@ TEST(OperandConversion, RegistersParametersNoOverlap) {
}
TEST(OperandScaling, ScalableAndNonScalable) {
for (OperandScale operand_scale = OperandScale::kSingle;
operand_scale <= OperandScale::kMaxValid;
operand_scale = Bytecodes::NextOperandScale(operand_scale)) {
const OperandScale kOperandScales[] = {
#define VALUE(Name, _) OperandScale::k##Name,
OPERAND_SCALE_LIST(VALUE)
#undef VALUE
};
for (OperandScale operand_scale : kOperandScales) {
int scale = static_cast<int>(operand_scale);
CHECK_EQ(Bytecodes::Size(Bytecode::kCallRuntime, operand_scale),
1 + 2 + 2 * scale);
......@@ -176,94 +181,39 @@ TEST(Bytecodes, DecodeBytecodeAndOperands) {
const char* output;
};
#define B(Name) static_cast<uint8_t>(Bytecode::k##Name)
#define REG_OPERAND(i) \
(InterpreterFrameConstants::kRegisterFileFromFp / kPointerSize - (i))
#define REG8(i) static_cast<uint8_t>(REG_OPERAND(i))
#if V8_TARGET_LITTLE_ENDIAN
#define REG16(i) \
static_cast<uint8_t>(REG_OPERAND(i)), \
static_cast<uint8_t>(REG_OPERAND(i) >> 8)
#elif V8_TARGET_BIG_ENDIAN
#define REG16(i) \
static_cast<uint8_t>(REG_OPERAND(i) >> 8), \
static_cast<uint8_t>(REG_OPERAND(i))
#else
#error "Unknown Architecture"
#endif
const BytecodesAndResult cases[] = {
#if V8_TARGET_LITTLE_ENDIAN
{{B(LdaSmi), 0x01}, 2, 0, " LdaSmi [1]"},
{{B(Wide), B(LdaSmi), 0xe8, 0x03}, 4, 0, " LdaSmi.Wide [1000]"},
{{B(ExtraWide), B(LdaSmi), 0xa0, 0x86, 0x01, 0x00},
6,
0,
"LdaSmi.ExtraWide [100000]"},
{{B(LdaSmi), 0xff}, 2, 0, " LdaSmi [-1]"},
{{B(Wide), B(LdaSmi), 0x18, 0xfc}, 4, 0, " LdaSmi.Wide [-1000]"},
{{B(ExtraWide), B(LdaSmi), 0x60, 0x79, 0xfe, 0xff},
6,
0,
"LdaSmi.ExtraWide [-100000]"},
{{B(Star), REG8(5)}, 2, 0, " Star r5"},
{{B(Wide), B(Star), REG16(136)}, 4, 0, " Star.Wide r136"},
{{B(Wide), B(Call), REG16(134), REG16(135), 0x02, 0x00, 0xb1, 0x00},
10,
0,
"Call.Wide r134, r135, #2, [177]"},
{{B(Ldar),
static_cast<uint8_t>(Register::FromParameterIndex(2, 3).ToOperand())},
2,
3,
" Ldar a1"},
{{B(Wide), B(CreateObjectLiteral), 0x01, 0x02, 0x03, 0x04, 0xa5},
7,
0,
"CreateObjectLiteral.Wide [513], [1027], #165"},
{{B(ExtraWide), B(JumpIfNull), 0x15, 0xcd, 0x5b, 0x07},
6,
0,
"JumpIfNull.ExtraWide [123456789]"},
#elif V8_TARGET_BIG_ENDIAN
{{B(LdaSmi), 0x01}, 2, 0, " LdaSmi [1]"},
{{B(Wide), B(LdaSmi), 0x03, 0xe8}, 4, 0, " LdaSmi.Wide [1000]"},
{{B(ExtraWide), B(LdaSmi), 0x00, 0x01, 0x86, 0xa0},
6,
0,
"LdaSmi.ExtraWide [100000]"},
{{B(LdaSmi), 0xff}, 2, 0, " LdaSmi [-1]"},
{{B(Wide), B(LdaSmi), 0xfc, 0x18}, 4, 0, " LdaSmi.Wide [-1000]"},
{{B(ExtraWide), B(LdaSmi), 0xff, 0xfe, 0x79, 0x60},
6,
0,
"LdaSmi.ExtraWide [-100000]"},
{{B(Star), REG8(5)}, 2, 0, " Star r5"},
{{B(Wide), B(Star), REG16(136)}, 4, 0, " Star.Wide r136"},
{{B(Wide), B(Call), REG16(134), REG16(135), 0x00, 0x02, 0x00, 0xb1},
10,
0,
"Call.Wide r134, r135, #2, [177]"},
{{B(Ldar),
static_cast<uint8_t>(Register::FromParameterIndex(2, 3).ToOperand())},
2,
3,
" Ldar a1"},
{{B(Wide), B(CreateObjectLiteral), 0x02, 0x01, 0x04, 0x03, 0xa5},
7,
0,
"CreateObjectLiteral.Wide [513], [1027], #165"},
{{B(ExtraWide), B(JumpIfNull), 0x07, 0x5b, 0xcd, 0x15},
6,
0,
"JumpIfNull.ExtraWide [123456789]"},
#else
#error "Unknown Architecture"
#endif
{{B(LdaSmi), U8(0x01)}, 2, 0, " LdaSmi [1]"},
{{B(Wide), B(LdaSmi), U16(1000)}, 4, 0, " LdaSmi.Wide [1000]"},
{{B(ExtraWide), B(LdaSmi), U32(100000)},
6,
0,
"LdaSmi.ExtraWide [100000]"},
{{B(LdaSmi), 0xff}, 2, 0, " LdaSmi [-1]"},
{{B(Wide), B(LdaSmi), 0x18, 0xfc}, 4, 0, " LdaSmi.Wide [-1000]"},
{{B(ExtraWide), B(LdaSmi), U32(-100000)},
6,
0,
"LdaSmi.ExtraWide [-100000]"},
{{B(Star), R8(5)}, 2, 0, " Star r5"},
{{B(Wide), B(Star), R16(136)}, 4, 0, " Star.Wide r136"},
{{B(Wide), B(Call), R16(134), R16(135), U16(0x02), U16(177)},
10,
0,
"Call.Wide r134, r135, #2, [177]"},
{{B(Ldar),
static_cast<uint8_t>(Register::FromParameterIndex(2, 3).ToOperand())},
2,
3,
" Ldar a1"},
{{B(Wide), B(CreateObjectLiteral), U16(513), U16(1027), U16(165)},
7,
0,
"CreateObjectLiteral.Wide [513], [1027], #165"},
{{B(ExtraWide), B(JumpIfNull), U32(123456789)},
6,
0,
"JumpIfNull.ExtraWide [123456789]"},
};
#undef B
#undef REG_OPERAND
#undef REG8
#undef REG16
for (size_t i = 0; i < arraysize(cases); ++i) {
// Generate reference string by prepending formatted bytes.
......@@ -304,13 +254,71 @@ TEST(Bytecodes, PrefixMappings) {
}
}
TEST(OperandScale, PrefixesScale) {
CHECK(Bytecodes::NextOperandScale(OperandScale::kSingle) ==
OperandScale::kDouble);
CHECK(Bytecodes::NextOperandScale(OperandScale::kDouble) ==
OperandScale::kQuadruple);
CHECK(Bytecodes::NextOperandScale(OperandScale::kQuadruple) ==
OperandScale::kInvalid);
// OperandSizesToScale maps a set of operand sizes to the smallest operand
// scale able to represent every one of them: the widest size wins.
TEST(Bytecodes, OperandScales) {
  CHECK_EQ(Bytecodes::OperandSizesToScale(OperandSize::kByte),
           OperandScale::kSingle);
  CHECK_EQ(Bytecodes::OperandSizesToScale(OperandSize::kShort),
           OperandScale::kDouble);
  CHECK_EQ(Bytecodes::OperandSizesToScale(OperandSize::kQuad),
           OperandScale::kQuadruple);
  CHECK_EQ(
      Bytecodes::OperandSizesToScale(OperandSize::kShort, OperandSize::kShort,
                                     OperandSize::kShort, OperandSize::kShort),
      OperandScale::kDouble);
  // A single quad operand in any position forces quadruple scale.
  CHECK_EQ(
      Bytecodes::OperandSizesToScale(OperandSize::kQuad, OperandSize::kShort,
                                     OperandSize::kShort, OperandSize::kShort),
      OperandScale::kQuadruple);
  CHECK_EQ(
      Bytecodes::OperandSizesToScale(OperandSize::kShort, OperandSize::kQuad,
                                     OperandSize::kShort, OperandSize::kShort),
      OperandScale::kQuadruple);
  CHECK_EQ(
      Bytecodes::OperandSizesToScale(OperandSize::kShort, OperandSize::kShort,
                                     OperandSize::kQuad, OperandSize::kShort),
      OperandScale::kQuadruple);
  CHECK_EQ(
      Bytecodes::OperandSizesToScale(OperandSize::kShort, OperandSize::kShort,
                                     OperandSize::kShort, OperandSize::kQuad),
      OperandScale::kQuadruple);
}
// SizeForSignedOperand picks the smallest operand size that holds a signed
// value; the checks cover both boundaries of each size class.
TEST(Bytecodes, SizesForSignedOperands) {
  CHECK(Bytecodes::SizeForSignedOperand(0) == OperandSize::kByte);
  CHECK(Bytecodes::SizeForSignedOperand(kMaxInt8) == OperandSize::kByte);
  CHECK(Bytecodes::SizeForSignedOperand(kMinInt8) == OperandSize::kByte);
  CHECK(Bytecodes::SizeForSignedOperand(kMaxInt8 + 1) == OperandSize::kShort);
  CHECK(Bytecodes::SizeForSignedOperand(kMinInt8 - 1) == OperandSize::kShort);
  CHECK(Bytecodes::SizeForSignedOperand(kMaxInt16) == OperandSize::kShort);
  CHECK(Bytecodes::SizeForSignedOperand(kMinInt16) == OperandSize::kShort);
  CHECK(Bytecodes::SizeForSignedOperand(kMaxInt16 + 1) == OperandSize::kQuad);
  CHECK(Bytecodes::SizeForSignedOperand(kMinInt16 - 1) == OperandSize::kQuad);
  CHECK(Bytecodes::SizeForSignedOperand(kMaxInt) == OperandSize::kQuad);
  CHECK(Bytecodes::SizeForSignedOperand(kMinInt) == OperandSize::kQuad);
}
// SizeForUnsignedOperand picks the smallest operand size that holds an
// unsigned value, for both the int and size_t overloads.
TEST(Bytecodes, SizesForUnsignedOperands) {
  // int overloads
  CHECK(Bytecodes::SizeForUnsignedOperand(0) == OperandSize::kByte);
  CHECK(Bytecodes::SizeForUnsignedOperand(kMaxUInt8) == OperandSize::kByte);
  CHECK(Bytecodes::SizeForUnsignedOperand(kMaxUInt8 + 1) ==
        OperandSize::kShort);
  CHECK(Bytecodes::SizeForUnsignedOperand(kMaxUInt16) == OperandSize::kShort);
  CHECK(Bytecodes::SizeForUnsignedOperand(kMaxUInt16 + 1) ==
        OperandSize::kQuad);
  // size_t overloads
  CHECK(Bytecodes::SizeForUnsignedOperand(static_cast<size_t>(0)) ==
        OperandSize::kByte);
  CHECK(Bytecodes::SizeForUnsignedOperand(static_cast<size_t>(kMaxUInt8)) ==
        OperandSize::kByte);
  CHECK(Bytecodes::SizeForUnsignedOperand(static_cast<size_t>(kMaxUInt8 + 1)) ==
        OperandSize::kShort);
  CHECK(Bytecodes::SizeForUnsignedOperand(static_cast<size_t>(kMaxUInt16)) ==
        OperandSize::kShort);
  CHECK(Bytecodes::SizeForUnsignedOperand(
            static_cast<size_t>(kMaxUInt16 + 1)) == OperandSize::kQuad);
  CHECK(Bytecodes::SizeForUnsignedOperand(static_cast<size_t>(kMaxUInt32)) ==
        OperandSize::kQuad);
}
TEST(OperandScale, PrefixesRequired) {
......
......@@ -25,7 +25,7 @@ static int offsets[] = {0, 1, 2, 3, 4, 30, 31, 32,
TEST_F(SourcePositionTableTest, EncodeStatement) {
SourcePositionTableBuilder builder(isolate(), zone());
for (int i = 0; i < arraysize(offsets); i++) {
builder.AddStatementPosition(offsets[i], offsets[i]);
builder.AddPosition(offsets[i], offsets[i], true);
}
// To test correctness, we rely on the assertions in ToSourcePositionTable().
......@@ -36,8 +36,8 @@ TEST_F(SourcePositionTableTest, EncodeStatement) {
TEST_F(SourcePositionTableTest, EncodeStatementDuplicates) {
SourcePositionTableBuilder builder(isolate(), zone());
for (int i = 0; i < arraysize(offsets); i++) {
builder.AddStatementPosition(offsets[i], offsets[i]);
builder.AddStatementPosition(offsets[i], offsets[i] + 1);
builder.AddPosition(offsets[i], offsets[i], true);
builder.AddPosition(offsets[i], offsets[i] + 1, true);
}
// To test correctness, we rely on the assertions in ToSourcePositionTable().
......@@ -48,7 +48,7 @@ TEST_F(SourcePositionTableTest, EncodeStatementDuplicates) {
TEST_F(SourcePositionTableTest, EncodeExpression) {
SourcePositionTableBuilder builder(isolate(), zone());
for (int i = 0; i < arraysize(offsets); i++) {
builder.AddExpressionPosition(offsets[i], offsets[i]);
builder.AddPosition(offsets[i], offsets[i], false);
}
CHECK(!builder.ToSourcePositionTable().is_null());
}
......@@ -60,9 +60,9 @@ TEST_F(SourcePositionTableTest, EncodeAscending) {
for (int i = 0; i < arraysize(offsets); i++) {
accumulator += offsets[i];
if (i % 2) {
builder.AddStatementPosition(accumulator, accumulator);
builder.AddPosition(accumulator, accumulator, true);
} else {
builder.AddExpressionPosition(accumulator, accumulator);
builder.AddPosition(accumulator, accumulator, false);
}
}
......@@ -70,9 +70,9 @@ TEST_F(SourcePositionTableTest, EncodeAscending) {
for (int i = 0; i < arraysize(offsets); i++) {
accumulator -= offsets[i];
if (i % 2) {
builder.AddStatementPosition(accumulator, accumulator);
builder.AddPosition(accumulator, accumulator, true);
} else {
builder.AddExpressionPosition(accumulator, accumulator);
builder.AddPosition(accumulator, accumulator, false);
}
}
......
......@@ -96,7 +96,10 @@
'interpreter/bytecodes-unittest.cc',
'interpreter/bytecode-array-builder-unittest.cc',
'interpreter/bytecode-array-iterator-unittest.cc',
'interpreter/bytecode-array-writer-unittest.cc',
'interpreter/bytecode-peephole-optimizer-unittest.cc',
'interpreter/bytecode-register-allocator-unittest.cc',
'interpreter/bytecode-pipeline-unittest.cc',
'interpreter/constant-array-builder-unittest.cc',
'interpreter/interpreter-assembler-unittest.cc',
'interpreter/interpreter-assembler-unittest.h',
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment