Commit 07ff6d9f authored by dcarney's avatar dcarney Committed by Commit bot

[turbofan] cleanup InstructionOperand a little

- ConstantOperand was using a too-small field to store its virtual register
- drop ConvertTo, replace it with simple copy
- split AllocatedOperand off from Immediate and Constant to make assignment clearer, also paving the way for small Immediates
- put zone first in *Operand::New
- driveby: drop delayed ssa deconstruction experiment

R=titzer@chromium.org
BUG=

Review URL: https://codereview.chromium.org/1050803002

Cr-Commit-Position: refs/heads/master@{#27692}
parent a3dcfa22
......@@ -151,7 +151,8 @@ class ArmOperandConverter FINAL : public InstructionOperandConverter {
DCHECK(!op->IsDoubleRegister());
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
// The linkage computes where all spill slots are located.
FrameOffset offset = linkage()->GetFrameOffset(op->index(), frame(), 0);
FrameOffset offset = linkage()->GetFrameOffset(
AllocatedOperand::cast(op)->index(), frame(), 0);
return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
}
};
......
......@@ -188,7 +188,8 @@ class Arm64OperandConverter FINAL : public InstructionOperandConverter {
DCHECK(!op->IsDoubleRegister());
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
// The linkage computes where all spill slots are located.
FrameOffset offset = linkage()->GetFrameOffset(op->index(), frame(), 0);
FrameOffset offset = linkage()->GetFrameOffset(
AllocatedOperand::cast(op)->index(), frame(), 0);
return MemOperand(offset.from_stack_pointer() ? masm->StackPointer() : fp,
offset.offset());
}
......
......@@ -90,20 +90,20 @@ class InstructionOperandConverter {
}
Register ToRegister(InstructionOperand* op) {
DCHECK(op->IsRegister());
return Register::FromAllocationIndex(op->index());
return Register::FromAllocationIndex(RegisterOperand::cast(op)->index());
}
DoubleRegister ToDoubleRegister(InstructionOperand* op) {
DCHECK(op->IsDoubleRegister());
return DoubleRegister::FromAllocationIndex(op->index());
return DoubleRegister::FromAllocationIndex(
DoubleRegisterOperand::cast(op)->index());
}
Constant ToConstant(InstructionOperand* op) {
if (op->IsImmediate()) {
return gen_->code()->GetImmediate(op->index());
return gen_->code()->GetImmediate(ImmediateOperand::cast(op)->index());
}
return gen_->code()->GetConstant(op->index());
return gen_->code()->GetConstant(
ConstantOperand::cast(op)->virtual_register());
}
double ToDouble(InstructionOperand* op) { return ToConstant(op).ToFloat64(); }
......
......@@ -176,9 +176,11 @@ void CodeGenerator::RecordSafepoint(PointerMap* pointers, Safepoint::Kind kind,
for (int i = 0; i < operands->length(); i++) {
InstructionOperand* pointer = operands->at(i);
if (pointer->IsStackSlot()) {
safepoint.DefinePointerSlot(pointer->index(), zone());
safepoint.DefinePointerSlot(StackSlotOperand::cast(pointer)->index(),
zone());
} else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
Register reg = Register::FromAllocationIndex(pointer->index());
Register reg = Register::FromAllocationIndex(
RegisterOperand::cast(pointer)->index());
safepoint.DefinePointerRegister(reg, zone());
}
}
......@@ -512,18 +514,19 @@ void CodeGenerator::AddTranslationForOperand(Translation* translation,
// rather than creating an int value.
if (type == kMachBool || type == kRepBit || type == kMachInt32 ||
type == kMachInt8 || type == kMachInt16) {
translation->StoreInt32StackSlot(op->index());
translation->StoreInt32StackSlot(StackSlotOperand::cast(op)->index());
} else if (type == kMachUint32 || type == kMachUint16 ||
type == kMachUint8) {
translation->StoreUint32StackSlot(op->index());
translation->StoreUint32StackSlot(StackSlotOperand::cast(op)->index());
} else if ((type & kRepMask) == kRepTagged) {
translation->StoreStackSlot(op->index());
translation->StoreStackSlot(StackSlotOperand::cast(op)->index());
} else {
CHECK(false);
}
} else if (op->IsDoubleStackSlot()) {
DCHECK((type & (kRepFloat32 | kRepFloat64)) != 0);
translation->StoreDoubleStackSlot(op->index());
translation->StoreDoubleStackSlot(
DoubleStackSlotOperand::cast(op)->index());
} else if (op->IsRegister()) {
InstructionOperandConverter converter(this, instr);
// TODO(jarin) kMachBool and kRepBit should materialize true and false
......
......@@ -15,12 +15,10 @@ namespace compiler {
typedef ZoneList<MoveOperands>::iterator op_iterator;
#ifdef ENABLE_SLOW_DCHECKS
// TODO(svenpanne) Brush up InstructionOperand with comparison?
struct InstructionOperandComparator {
bool operator()(const InstructionOperand* x,
const InstructionOperand* y) const {
return (x->kind() < y->kind()) ||
(x->kind() == y->kind() && x->index() < y->index());
return *x < *y;
}
};
#endif
......
......@@ -708,7 +708,7 @@ void GraphC1Visualizer::PrintLiveRange(LiveRange* range, const char* type) {
PrintIndent();
os_ << range->id() << " " << type;
if (range->HasRegisterAssigned()) {
InstructionOperand op = range->GetAssignedOperand();
AllocatedOperand op = AllocatedOperand::cast(range->GetAssignedOperand());
int assigned_reg = op.index();
if (op.IsDoubleRegister()) {
os_ << " \"" << DoubleRegister::AllocationIndexToString(assigned_reg)
......@@ -718,18 +718,21 @@ void GraphC1Visualizer::PrintLiveRange(LiveRange* range, const char* type) {
os_ << " \"" << Register::AllocationIndexToString(assigned_reg) << "\"";
}
} else if (range->IsSpilled()) {
auto top = range->TopLevel();
int index = -1;
if (range->TopLevel()->HasSpillRange()) {
if (top->HasSpillRange()) {
index = kMaxInt; // This hasn't been set yet.
} else if (top->GetSpillOperand()->IsConstant()) {
os_ << " \"const(nostack):"
<< ConstantOperand::cast(top->GetSpillOperand())->virtual_register()
<< "\"";
} else {
index = range->TopLevel()->GetSpillOperand()->index();
}
if (range->TopLevel()->Kind() == DOUBLE_REGISTERS) {
os_ << " \"double_stack:" << index << "\"";
} else if (range->TopLevel()->Kind() == GENERAL_REGISTERS) {
os_ << " \"stack:" << index << "\"";
} else {
os_ << " \"const(nostack):" << index << "\"";
index = AllocatedOperand::cast(top->GetSpillOperand())->index();
if (top->Kind() == DOUBLE_REGISTERS) {
os_ << " \"double_stack:" << index << "\"";
} else if (top->Kind() == GENERAL_REGISTERS) {
os_ << " \"stack:" << index << "\"";
}
}
}
int parent_index = -1;
......
......@@ -47,7 +47,8 @@ class IA32OperandConverter : public InstructionOperandConverter {
}
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
// The linkage computes where all spill slots are located.
FrameOffset offset = linkage()->GetFrameOffset(op->index(), frame(), extra);
FrameOffset offset = linkage()->GetFrameOffset(
AllocatedOperand::cast(op)->index(), frame(), extra);
return Operand(offset.from_stack_pointer() ? esp : ebp, offset.offset());
}
......
......@@ -42,17 +42,23 @@ std::ostream& operator<<(std::ostream& os,
}
}
case InstructionOperand::CONSTANT:
return os << "[constant:" << op.index() << "]";
return os << "[constant:" << ConstantOperand::cast(op).virtual_register()
<< "]";
case InstructionOperand::IMMEDIATE:
return os << "[immediate:" << op.index() << "]";
return os << "[immediate:" << ImmediateOperand::cast(op).index() << "]";
case InstructionOperand::STACK_SLOT:
return os << "[stack:" << op.index() << "]";
return os << "[stack:" << StackSlotOperand::cast(op).index() << "]";
case InstructionOperand::DOUBLE_STACK_SLOT:
return os << "[double_stack:" << op.index() << "]";
return os << "[double_stack:" << DoubleStackSlotOperand::cast(op).index()
<< "]";
case InstructionOperand::REGISTER:
return os << "[" << conf->general_register_name(op.index()) << "|R]";
return os << "["
<< conf->general_register_name(
RegisterOperand::cast(op).index()) << "|R]";
case InstructionOperand::DOUBLE_REGISTER:
return os << "[" << conf->double_register_name(op.index()) << "|R]";
return os << "["
<< conf->double_register_name(
DoubleRegisterOperand::cast(op).index()) << "|R]";
case InstructionOperand::INVALID:
return os << "(x)";
}
......@@ -173,7 +179,7 @@ std::ostream& operator<<(std::ostream& os,
void PointerMap::RecordPointer(InstructionOperand* op, Zone* zone) {
// Do not record arguments as pointers.
if (op->IsStackSlot() && op->index() < 0) return;
if (op->IsStackSlot() && StackSlotOperand::cast(op)->index() < 0) return;
DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
pointer_operands_.Add(op, zone);
}
......@@ -181,7 +187,7 @@ void PointerMap::RecordPointer(InstructionOperand* op, Zone* zone) {
void PointerMap::RemovePointer(InstructionOperand* op) {
// Do not record arguments as pointers.
if (op->IsStackSlot() && op->index() < 0) return;
if (op->IsStackSlot() && StackSlotOperand::cast(op)->index() < 0) return;
DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
for (int i = 0; i < pointer_operands_.length(); ++i) {
if (pointer_operands_[i]->Equals(op)) {
......@@ -194,7 +200,7 @@ void PointerMap::RemovePointer(InstructionOperand* op) {
void PointerMap::RecordUntagged(InstructionOperand* op, Zone* zone) {
// Do not record arguments as pointers.
if (op->IsStackSlot() && op->index() < 0) return;
if (op->IsStackSlot() && StackSlotOperand::cast(op)->index() < 0) return;
DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
untagged_operands_.Add(op, zone);
}
......@@ -593,6 +599,16 @@ int InstructionSequence::GetFrameStateDescriptorCount() {
}
RpoNumber InstructionSequence::InputRpo(Instruction* instr, size_t index) {
InstructionOperand* operand = instr->InputAt(index);
Constant constant =
operand->IsImmediate()
? GetImmediate(ImmediateOperand::cast(operand)->index())
: GetConstant(ConstantOperand::cast(operand)->virtual_register());
return constant.ToRpoNumber();
}
FrameStateDescriptor::FrameStateDescriptor(
Zone* zone, const FrameStateCallInfo& state_info, size_t parameters_count,
size_t locals_count, size_t stack_count, FrameStateDescriptor* outer_state)
......
This diff is collapsed.
......@@ -110,7 +110,8 @@ class MipsOperandConverter FINAL : public InstructionOperandConverter {
DCHECK(!op->IsDoubleRegister());
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
// The linkage computes where all spill slots are located.
FrameOffset offset = linkage()->GetFrameOffset(op->index(), frame(), 0);
FrameOffset offset = linkage()->GetFrameOffset(
AllocatedOperand::cast(op)->index(), frame(), 0);
return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
}
};
......
......@@ -110,7 +110,8 @@ class MipsOperandConverter FINAL : public InstructionOperandConverter {
DCHECK(!op->IsDoubleRegister());
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
// The linkage computes where all spill slots are located.
FrameOffset offset = linkage()->GetFrameOffset(op->index(), frame(), 0);
FrameOffset offset = linkage()->GetFrameOffset(
AllocatedOperand::cast(op)->index(), frame(), 0);
return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
}
};
......
......@@ -256,8 +256,7 @@ void MoveOptimizer::FinalizeMoves(Instruction* instr) {
loads.push_back(move);
// Replace source with copy for later use.
auto dest = move->destination();
move->set_destination(
InstructionOperand::New(code_zone(), dest->kind(), dest->index()));
move->set_destination(InstructionOperand::New(code_zone(), *dest));
continue;
}
if ((found->destination()->IsStackSlot() ||
......@@ -266,12 +265,10 @@ void MoveOptimizer::FinalizeMoves(Instruction* instr) {
move->destination()->IsDoubleStackSlot())) {
// Found a better source for this load. Smash it in place to affect other
// loads that have already been split.
InstructionOperand::Kind found_kind = found->destination()->kind();
int found_index = found->destination()->index();
auto next_dest =
InstructionOperand::New(code_zone(), found_kind, found_index);
InstructionOperand::New(code_zone(), *found->destination());
auto dest = move->destination();
found->destination()->ConvertTo(dest->kind(), dest->index());
InstructionOperand::ReplaceWith(found->destination(), dest);
move->set_destination(next_dest);
}
// move from load destination.
......
......@@ -103,7 +103,8 @@ class PPCOperandConverter FINAL : public InstructionOperandConverter {
DCHECK(!op->IsDoubleRegister());
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
// The linkage computes where all spill slots are located.
FrameOffset offset = linkage()->GetFrameOffset(op->index(), frame(), 0);
FrameOffset offset = linkage()->GetFrameOffset(
AllocatedOperand::cast(op)->index(), frame(), 0);
return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
}
};
......
......@@ -119,7 +119,7 @@ void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
if (op->IsConstant()) {
constraint->type_ = kConstant;
constraint->value_ = ConstantOperand::cast(op)->index();
constraint->value_ = ConstantOperand::cast(op)->virtual_register();
constraint->virtual_register_ = constraint->value_;
} else if (op->IsImmediate()) {
constraint->type_ = kImmediate;
......@@ -180,29 +180,30 @@ void RegisterAllocatorVerifier::CheckConstraint(
switch (constraint->type_) {
case kConstant:
CHECK(op->IsConstant());
CHECK_EQ(op->index(), constraint->value_);
CHECK_EQ(ConstantOperand::cast(op)->virtual_register(),
constraint->value_);
return;
case kImmediate:
CHECK(op->IsImmediate());
CHECK_EQ(op->index(), constraint->value_);
CHECK_EQ(ImmediateOperand::cast(op)->index(), constraint->value_);
return;
case kRegister:
CHECK(op->IsRegister());
return;
case kFixedRegister:
CHECK(op->IsRegister());
CHECK_EQ(op->index(), constraint->value_);
CHECK_EQ(RegisterOperand::cast(op)->index(), constraint->value_);
return;
case kDoubleRegister:
CHECK(op->IsDoubleRegister());
return;
case kFixedDoubleRegister:
CHECK(op->IsDoubleRegister());
CHECK_EQ(op->index(), constraint->value_);
CHECK_EQ(DoubleRegisterOperand::cast(op)->index(), constraint->value_);
return;
case kFixedSlot:
CHECK(op->IsStackSlot());
CHECK_EQ(op->index(), constraint->value_);
CHECK_EQ(StackSlotOperand::cast(op)->index(), constraint->value_);
return;
case kSlot:
CHECK(op->IsStackSlot());
......@@ -343,11 +344,11 @@ class OperandMap : public ZoneObject {
void DropRegisters(const RegisterConfiguration* config) {
for (int i = 0; i < config->num_general_registers(); ++i) {
InstructionOperand op(InstructionOperand::REGISTER, i);
RegisterOperand op(i);
Drop(&op);
}
for (int i = 0; i < config->num_double_registers(); ++i) {
InstructionOperand op(InstructionOperand::DOUBLE_REGISTER, i);
DoubleRegisterOperand op(i);
Drop(&op);
}
}
......
This diff is collapsed.
......@@ -215,15 +215,15 @@ class InstructionOperandCache FINAL : public ZoneObject {
public:
InstructionOperandCache();
InstructionOperand* RegisterOperand(int index) {
RegisterOperand* GetRegisterOperand(int index) {
DCHECK(index >= 0 &&
index < static_cast<int>(arraysize(general_register_operands_)));
return &general_register_operands_[index];
return RegisterOperand::cast(&general_register_operands_[index]);
}
InstructionOperand* DoubleRegisterOperand(int index) {
DoubleRegisterOperand* GetDoubleRegisterOperand(int index) {
DCHECK(index >= 0 &&
index < static_cast<int>(arraysize(double_register_operands_)));
return &double_register_operands_[index];
return DoubleRegisterOperand::cast(&double_register_operands_[index]);
}
private:
......@@ -345,7 +345,7 @@ class LiveRange FINAL : public ZoneObject {
InstructionOperand* operand);
void SetSpillOperand(InstructionOperand* operand);
void SetSpillRange(SpillRange* spill_range);
void CommitSpillOperand(InstructionOperand* operand);
void CommitSpillOperand(AllocatedOperand* operand);
void CommitSpillsAtDefinition(InstructionSequence* sequence,
InstructionOperand* operand,
bool might_be_duplicated);
......@@ -423,7 +423,7 @@ class SpillRange FINAL : public ZoneObject {
RegisterKind Kind() const { return live_ranges_[0]->Kind(); }
bool IsEmpty() const { return live_ranges_.empty(); }
bool TryMerge(SpillRange* other);
void SetOperand(InstructionOperand* op);
void SetOperand(AllocatedOperand* op);
private:
LifetimePosition End() const { return end_position_; }
......
......@@ -44,7 +44,8 @@ class X64OperandConverter : public InstructionOperandConverter {
Operand ToOperand(InstructionOperand* op, int extra = 0) {
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
// The linkage computes where all spill slots are located.
FrameOffset offset = linkage()->GetFrameOffset(op->index(), frame(), extra);
FrameOffset offset = linkage()->GetFrameOffset(
AllocatedOperand::cast(op)->index(), frame(), extra);
return Operand(offset.from_stack_pointer() ? rsp : rbp, offset.offset());
}
......
......@@ -409,9 +409,6 @@ DEFINE_BOOL(turbo_builtin_inlining, true, "enable builtin inlining in TurboFan")
DEFINE_BOOL(trace_turbo_inlining, false, "trace TurboFan inlining")
DEFINE_BOOL(loop_assignment_analysis, true, "perform loop assignment analysis")
DEFINE_BOOL(turbo_profiling, false, "enable profiling in TurboFan")
// TODO(dcarney): this is just for experimentation, remove when default.
DEFINE_BOOL(turbo_delay_ssa_decon, false,
"delay ssa deconstruction in TurboFan register allocator")
DEFINE_BOOL(turbo_verify_allocation, DEBUG_BOOL,
"verify register allocation in TurboFan")
DEFINE_BOOL(turbo_move_optimization, true, "optimize gap moves in TurboFan")
......
......@@ -51,11 +51,22 @@ class InterpreterState {
}
static Key KeyFor(const InstructionOperand* op) {
return Key(op->kind(), op->index());
int v = op->IsConstant() ? ConstantOperand::cast(op)->virtual_register()
: AllocatedOperand::cast(op)->index();
return Key(op->kind(), v);
}
static Value ValueFor(const InstructionOperand* op) {
return Value(op->kind(), op->index());
int v = op->IsConstant() ? ConstantOperand::cast(op)->virtual_register()
: AllocatedOperand::cast(op)->index();
return Value(op->kind(), v);
}
static InstructionOperand FromKey(Key key) {
if (key.first == InstructionOperand::CONSTANT) {
return ConstantOperand(key.second);
}
return AllocatedOperand(key.first, key.second);
}
friend std::ostream& operator<<(std::ostream& os,
......@@ -63,8 +74,8 @@ class InterpreterState {
for (OperandMap::const_iterator it = is.values_.begin();
it != is.values_.end(); ++it) {
if (it != is.values_.begin()) os << " ";
InstructionOperand source(it->first.first, it->first.second);
InstructionOperand destination(it->second.first, it->second.second);
InstructionOperand source = FromKey(it->first);
InstructionOperand destination = FromKey(it->second);
MoveOperands mo(&source, &destination);
PrintableMoveOperands pmo = {RegisterConfiguration::ArchDefault(), &mo};
os << pmo;
......@@ -115,7 +126,7 @@ class ParallelMoveCreator : public HandleAndZoneScope {
ParallelMove* parallel_move = new (main_zone()) ParallelMove(main_zone());
std::set<InstructionOperand*, InstructionOperandComparator> seen;
for (int i = 0; i < size; ++i) {
MoveOperands mo(CreateRandomOperand(), CreateRandomOperand());
MoveOperands mo(CreateRandomOperand(true), CreateRandomOperand(false));
if (!mo.IsRedundant() && seen.find(mo.destination()) == seen.end()) {
parallel_move->AddMove(mo.source(), mo.destination(), main_zone());
seen.insert(mo.destination());
......@@ -128,24 +139,24 @@ class ParallelMoveCreator : public HandleAndZoneScope {
struct InstructionOperandComparator {
bool operator()(const InstructionOperand* x,
const InstructionOperand* y) const {
return (x->kind() < y->kind()) ||
(x->kind() == y->kind() && x->index() < y->index());
return *x < *y;
}
};
InstructionOperand* CreateRandomOperand() {
InstructionOperand* CreateRandomOperand(bool is_source) {
int index = rng_->NextInt(6);
switch (rng_->NextInt(5)) {
// destination can't be Constant.
switch (rng_->NextInt(is_source ? 5 : 4)) {
case 0:
return ConstantOperand::New(index, main_zone());
return StackSlotOperand::New(main_zone(), index);
case 1:
return StackSlotOperand::New(index, main_zone());
return DoubleStackSlotOperand::New(main_zone(), index);
case 2:
return DoubleStackSlotOperand::New(index, main_zone());
return RegisterOperand::New(main_zone(), index);
case 3:
return RegisterOperand::New(index, main_zone());
return DoubleRegisterOperand::New(main_zone(), index);
case 4:
return DoubleRegisterOperand::New(index, main_zone());
return ConstantOperand::New(main_zone(), index);
}
UNREACHABLE();
return NULL;
......
......@@ -59,15 +59,15 @@ class TestCode : public HandleAndZoneScope {
Start();
sequence_.AddInstruction(Instruction::New(main_zone(), kArchNop));
int index = static_cast<int>(sequence_.instructions().size()) - 1;
AddGapMove(index, RegisterOperand::New(13, main_zone()),
RegisterOperand::New(13, main_zone()));
AddGapMove(index, RegisterOperand::New(main_zone(), 13),
RegisterOperand::New(main_zone(), 13));
}
void NonRedundantMoves() {
Start();
sequence_.AddInstruction(Instruction::New(main_zone(), kArchNop));
int index = static_cast<int>(sequence_.instructions().size()) - 1;
AddGapMove(index, ImmediateOperand::New(11, main_zone()),
RegisterOperand::New(11, main_zone()));
AddGapMove(index, ImmediateOperand::New(main_zone(), 11),
RegisterOperand::New(main_zone(), 11));
}
void Other() {
Start();
......
......@@ -76,16 +76,17 @@ InstructionSelectorTest::Stream InstructionSelectorTest::StreamBuilder::Build(
InstructionOperand* output = instr->OutputAt(i);
EXPECT_NE(InstructionOperand::IMMEDIATE, output->kind());
if (output->IsConstant()) {
s.constants_.insert(std::make_pair(
output->index(), sequence.GetConstant(output->index())));
int vreg = ConstantOperand::cast(output)->virtual_register();
s.constants_.insert(std::make_pair(vreg, sequence.GetConstant(vreg)));
}
}
for (size_t i = 0; i < instr->InputCount(); ++i) {
InstructionOperand* input = instr->InputAt(i);
EXPECT_NE(InstructionOperand::CONSTANT, input->kind());
if (input->IsImmediate()) {
s.immediates_.insert(std::make_pair(
input->index(), sequence.GetImmediate(input->index())));
int index = ImmediateOperand::cast(input)->index();
s.immediates_.insert(
std::make_pair(index, sequence.GetImmediate(index)));
}
}
s.instructions_.push_back(instr);
......
......@@ -166,7 +166,9 @@ class InstructionSelectorTest : public TestWithContext,
}
int ToVreg(const InstructionOperand* operand) const {
if (operand->IsConstant()) return operand->index();
if (operand->IsConstant()) {
return ConstantOperand::cast(operand)->virtual_register();
}
EXPECT_EQ(InstructionOperand::UNALLOCATED, operand->kind());
return UnallocatedOperand::cast(operand)->virtual_register();
}
......@@ -202,14 +204,15 @@ class InstructionSelectorTest : public TestWithContext,
Constant ToConstant(const InstructionOperand* operand) const {
ConstantMap::const_iterator i;
if (operand->IsConstant()) {
i = constants_.find(operand->index());
i = constants_.find(ConstantOperand::cast(operand)->virtual_register());
EXPECT_EQ(ConstantOperand::cast(operand)->virtual_register(), i->first);
EXPECT_FALSE(constants_.end() == i);
} else {
EXPECT_EQ(InstructionOperand::IMMEDIATE, operand->kind());
i = immediates_.find(operand->index());
i = immediates_.find(ImmediateOperand::cast(operand)->index());
EXPECT_EQ(ImmediateOperand::cast(operand)->index(), i->first);
EXPECT_FALSE(immediates_.end() == i);
}
EXPECT_EQ(operand->index(), i->first);
return i->second;
}
......
......@@ -67,12 +67,12 @@ class MoveOptimizerTest : public InstructionSequenceTest {
CHECK_NE(kNoValue, op.value_);
switch (op.type_) {
case kConstant:
return ConstantOperand::New(op.value_, zone());
return ConstantOperand::New(zone(), op.value_);
case kFixedSlot:
return StackSlotOperand::New(op.value_, zone());
return StackSlotOperand::New(zone(), op.value_);
case kFixedRegister:
CHECK(0 <= op.value_ && op.value_ < num_general_registers());
return RegisterOperand::New(op.value_, zone());
return RegisterOperand::New(zone(), op.value_);
default:
break;
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment