Commit f1aa5562 authored by danno, committed by Commit bot

[turbofan] Create ExplicitOperands to specify operands without virtual registers

Up until now, if one wanted to specify an explicit stack location
or register as an operand for an instruction, it had to also be
explicitly associated with a virtual register as a so-called
FixedRegister or FixedStackSlot.

For the implementation of tail calls, the plan is to use the gap
resolver to shuffle stack locations from the caller to the
tail-called callee. In order to do this, it must be possible to
explicitly address operand locations on the stack that are not
associated with virtual registers.
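
As a rough sketch of what such a tail-call stack shuffle could look
like with the ExplicitOperand kind introduced below (the slot indices,
the kRepWord32 machine type and the ParallelMove::AddMove call site are
illustrative assumptions, not taken from this change):

  // Sketch only: move a value from a caller stack slot into the slot the
  // tail-called callee expects, with no virtual register involved.
  InstructionOperand from =
      ExplicitOperand(LocationOperand::STACK_SLOT, kRepWord32, 3);
  InstructionOperand to =
      ExplicitOperand(LocationOperand::STACK_SLOT, kRepWord32, 0);
  parallel_move->AddMove(from, to);  // later materialized by the gap resolver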

This CL introduces ExplicitOperands, which can specify a specific
register or stack location that is not associated with a virtual
register. This will allow tail calls to specify the target
locations for the necessary stack moves in the gap for the tail
call without the core register allocation having to know about
the target of the stack moves at all.

In the process this CL:
* creates a new Operand kind, ExplicitOperand, with which
  instructions can specify registers and stack slots without an
  associated virtual register.
* creates a LocationOperand class from which AllocatedOperand and
  ExplicitOperand are derived and which provides a common interface
  for accessing Register, DoubleRegister and spill slot information
  (see the sketch after this list).
* removes RegisterOperand, DoubleRegisterOperand,
  StackSlotOperand and DoubleStackSlotOperand, which are subsumed
  by LocationOperand.
* addresses a cleanup TODO in AllocatedOperand to reduce the
  redundancy of AllocatedOperand::Kind by using machine_type() to
  determine whether an operand corresponds to a general-purpose or
  double register.
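
A rough sketch of that common interface as it is used throughout the
hunks below (the surrounding wiring and the unused locals are only for
illustration; the accessors themselves are the ones added by this change):

  // Sketch: uniform access through LocationOperand, regardless of whether
  // the operand is an AllocatedOperand or an ExplicitOperand.
  LocationOperand loc = LocationOperand::cast(op);  // op: InstructionOperand
  if (op.IsRegister()) {
    Register reg = loc.GetRegister();
  } else if (op.IsDoubleRegister()) {
    DoubleRegister dreg = loc.GetDoubleRegister();
  } else {
    int spill_slot_index = loc.index();  // (double) stack slot
  }
  MachineType type = loc.machine_type();  // GP vs. double derived from this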

BUG=v8:4076
LOG=n

Review URL: https://codereview.chromium.org/1389373002

Cr-Commit-Position: refs/heads/master@{#31603}
parent aadeef9e
......@@ -100,11 +100,11 @@ class InstructionOperandConverter {
}
Register ToRegister(InstructionOperand* op) {
return RegisterOperand::cast(op)->GetRegister();
return LocationOperand::cast(op)->GetRegister();
}
DoubleRegister ToDoubleRegister(InstructionOperand* op) {
return DoubleRegisterOperand::cast(op)->GetDoubleRegister();
return LocationOperand::cast(op)->GetDoubleRegister();
}
Constant ToConstant(InstructionOperand* op) {
......
......@@ -220,14 +220,14 @@ void CodeGenerator::RecordSafepoint(ReferenceMap* references,
frame()->GetTotalFrameSlotCount() - frame()->GetSpillSlotCount();
for (auto& operand : references->reference_operands()) {
if (operand.IsStackSlot()) {
int index = StackSlotOperand::cast(operand).index();
int index = LocationOperand::cast(operand).index();
DCHECK(index >= 0);
// Safepoint table indices are 0-based from the beginning of the spill
// slot area, adjust appropriately.
index -= stackSlotToSpillSlotDelta;
safepoint.DefinePointerSlot(index, zone());
} else if (operand.IsRegister() && (kind & Safepoint::kWithRegisters)) {
Register reg = RegisterOperand::cast(operand).GetRegister();
Register reg = LocationOperand::cast(operand).GetRegister();
safepoint.DefinePointerRegister(reg, zone());
}
}
......@@ -589,21 +589,20 @@ void CodeGenerator::AddTranslationForOperand(Translation* translation,
MachineType type) {
if (op->IsStackSlot()) {
if (type == kMachBool || type == kRepBit) {
translation->StoreBoolStackSlot(StackSlotOperand::cast(op)->index());
translation->StoreBoolStackSlot(LocationOperand::cast(op)->index());
} else if (type == kMachInt32 || type == kMachInt8 || type == kMachInt16) {
translation->StoreInt32StackSlot(StackSlotOperand::cast(op)->index());
translation->StoreInt32StackSlot(LocationOperand::cast(op)->index());
} else if (type == kMachUint32 || type == kMachUint16 ||
type == kMachUint8) {
translation->StoreUint32StackSlot(StackSlotOperand::cast(op)->index());
translation->StoreUint32StackSlot(LocationOperand::cast(op)->index());
} else if ((type & kRepMask) == kRepTagged) {
translation->StoreStackSlot(StackSlotOperand::cast(op)->index());
translation->StoreStackSlot(LocationOperand::cast(op)->index());
} else {
CHECK(false);
}
} else if (op->IsDoubleStackSlot()) {
DCHECK((type & (kRepFloat32 | kRepFloat64)) != 0);
translation->StoreDoubleStackSlot(
DoubleStackSlotOperand::cast(op)->index());
translation->StoreDoubleStackSlot(LocationOperand::cast(op)->index());
} else if (op->IsRegister()) {
InstructionOperandConverter converter(this, instr);
if (type == kMachBool || type == kRepBit) {
......
......@@ -75,7 +75,7 @@ void GapResolver::PerformMove(ParallelMove* moves, MoveOperands* move) const {
// This move's source may have changed due to swaps to resolve cycles and so
// it may now be the last move in the cycle. If so remove it.
InstructionOperand source = move->source();
if (source.EqualsModuloType(destination)) {
if (source.EqualsCanonicalized(destination)) {
move->Eliminate();
return;
}
......
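
After canonicalization (the EqualsCanonicalized/CompareCanonicalized
renaming above), an ExplicitOperand and an AllocatedOperand that name the
same physical location compare equal, which is what lets the gap resolver
and the move optimizer treat moves between them as redundant (see the
RemovesRedundantExplicit test near the end of this diff). A hedged sketch,
with register code and machine type chosen for illustration:

  ExplicitOperand explicit_reg(LocationOperand::REGISTER, kRepWord32, 1);
  AllocatedOperand allocated_reg(LocationOperand::REGISTER, kRepWord32, 1);
  CHECK(explicit_reg.EqualsCanonicalized(allocated_reg));
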
......@@ -120,6 +120,11 @@ class OperandGenerator {
reg.code(), GetVReg(node)));
}
InstructionOperand UseExplicit(Register reg) {
MachineType machine_type = InstructionSequence::DefaultRepresentation();
return ExplicitOperand(LocationOperand::REGISTER, machine_type, reg.code());
}
InstructionOperand UseImmediate(Node* node) {
return sequence()->AddImmediate(ToConstant(node));
}
......
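
For context, a hypothetical call site for the new UseExplicit helper in an
instruction selector (the selector wiring and the register are assumptions;
only UseExplicit itself comes from this change):

  OperandGenerator g(selector);  // selector: an InstructionSelector*
  // Obtain an operand for a specific hardware register without allocating
  // a virtual register for it (the register chosen here is illustrative).
  InstructionOperand target = g.UseExplicit(some_fixed_register);
  // `target` can then be passed to Emit() like any other input operand.
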
......@@ -105,24 +105,22 @@ std::ostream& operator<<(std::ostream& os,
return os << "[immediate:" << imm.indexed_value() << "]";
}
}
case InstructionOperand::EXPLICIT:
case InstructionOperand::ALLOCATED: {
auto allocated = AllocatedOperand::cast(op);
switch (allocated.allocated_kind()) {
case AllocatedOperand::STACK_SLOT:
os << "[stack:" << StackSlotOperand::cast(op).index();
break;
case AllocatedOperand::DOUBLE_STACK_SLOT:
os << "[double_stack:" << DoubleStackSlotOperand::cast(op).index();
break;
case AllocatedOperand::REGISTER:
os << "[" << RegisterOperand::cast(op).GetRegister().ToString()
<< "|R";
break;
case AllocatedOperand::DOUBLE_REGISTER:
os << "["
<< DoubleRegisterOperand::cast(op).GetDoubleRegister().ToString()
<< "|R";
break;
auto allocated = LocationOperand::cast(op);
if (op.IsStackSlot()) {
os << "[stack:" << LocationOperand::cast(op).index();
} else if (op.IsDoubleStackSlot()) {
os << "[double_stack:" << LocationOperand::cast(op).index();
} else if (op.IsRegister()) {
os << "[" << LocationOperand::cast(op).GetRegister().ToString() << "|R";
} else {
DCHECK(op.IsDoubleRegister());
os << "[" << LocationOperand::cast(op).GetDoubleRegister().ToString()
<< "|R";
}
if (allocated.IsExplicit()) {
os << "|E";
}
switch (allocated.machine_type()) {
case kRepWord32:
......@@ -181,11 +179,11 @@ MoveOperands* ParallelMove::PrepareInsertAfter(MoveOperands* move) const {
MoveOperands* to_eliminate = nullptr;
for (auto curr : *this) {
if (curr->IsEliminated()) continue;
if (curr->destination().EqualsModuloType(move->source())) {
if (curr->destination().EqualsCanonicalized(move->source())) {
DCHECK(!replacement);
replacement = curr;
if (to_eliminate != nullptr) break;
} else if (curr->destination().EqualsModuloType(move->destination())) {
} else if (curr->destination().EqualsCanonicalized(move->destination())) {
DCHECK(!to_eliminate);
to_eliminate = curr;
if (replacement != nullptr) break;
......@@ -197,6 +195,16 @@ MoveOperands* ParallelMove::PrepareInsertAfter(MoveOperands* move) const {
}
ExplicitOperand::ExplicitOperand(LocationKind kind, MachineType machine_type,
int index)
: LocationOperand(EXPLICIT, kind, machine_type, index) {
DCHECK_IMPLIES(kind == REGISTER && !IsFloatingPoint(machine_type),
Register::from_code(index).IsAllocatable());
DCHECK_IMPLIES(kind == REGISTER && IsFloatingPoint(machine_type),
DoubleRegister::from_code(index).IsAllocatable());
}
Instruction::Instruction(InstructionCode opcode)
: opcode_(opcode),
bit_field_(OutputCountField::encode(0) | InputCountField::encode(0) |
......@@ -263,7 +271,7 @@ std::ostream& operator<<(std::ostream& os,
void ReferenceMap::RecordReference(const AllocatedOperand& op) {
// Do not record arguments as pointers.
if (op.IsStackSlot() && StackSlotOperand::cast(op).index() < 0) return;
if (op.IsStackSlot() && LocationOperand::cast(op).index() < 0) return;
DCHECK(!op.IsDoubleRegister() && !op.IsDoubleStackSlot());
reference_operands_.push_back(op);
}
......
......@@ -14,10 +14,10 @@ typedef std::pair<InstructionOperand, InstructionOperand> MoveKey;
struct MoveKeyCompare {
bool operator()(const MoveKey& a, const MoveKey& b) const {
if (a.first.EqualsModuloType(b.first)) {
return a.second.CompareModuloType(b.second);
if (a.first.EqualsCanonicalized(b.first)) {
return a.second.CompareCanonicalized(b.second);
}
return a.first.CompareModuloType(b.first);
return a.first.CompareCanonicalized(b.first);
}
};
......@@ -245,12 +245,12 @@ bool IsSlot(const InstructionOperand& op) {
bool LoadCompare(const MoveOperands* a, const MoveOperands* b) {
if (!a->source().EqualsModuloType(b->source())) {
return a->source().CompareModuloType(b->source());
if (!a->source().EqualsCanonicalized(b->source())) {
return a->source().CompareCanonicalized(b->source());
}
if (IsSlot(a->destination()) && !IsSlot(b->destination())) return false;
if (!IsSlot(a->destination()) && IsSlot(b->destination())) return true;
return a->destination().CompareModuloType(b->destination());
return a->destination().CompareCanonicalized(b->destination());
}
} // namespace
......@@ -276,7 +276,7 @@ void MoveOptimizer::FinalizeMoves(Instruction* instr) {
for (auto load : loads) {
// New group.
if (group_begin == nullptr ||
!load->source().EqualsModuloType(group_begin->source())) {
!load->source().EqualsCanonicalized(group_begin->source())) {
group_begin = load;
continue;
}
......
......@@ -48,7 +48,7 @@ void VerifyAllocatedGaps(const Instruction* instr) {
void RegisterAllocatorVerifier::VerifyInput(
const OperandConstraint& constraint) {
CHECK_NE(kSameAsFirst, constraint.type_);
if (constraint.type_ != kImmediate) {
if (constraint.type_ != kImmediate && constraint.type_ != kExplicit) {
CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
constraint.virtual_register_);
}
......@@ -59,6 +59,7 @@ void RegisterAllocatorVerifier::VerifyTemp(
const OperandConstraint& constraint) {
CHECK_NE(kSameAsFirst, constraint.type_);
CHECK_NE(kImmediate, constraint.type_);
CHECK_NE(kExplicit, constraint.type_);
CHECK_NE(kConstant, constraint.type_);
}
......@@ -66,6 +67,7 @@ void RegisterAllocatorVerifier::VerifyTemp(
void RegisterAllocatorVerifier::VerifyOutput(
const OperandConstraint& constraint) {
CHECK_NE(kImmediate, constraint.type_);
CHECK_NE(kExplicit, constraint.type_);
CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
constraint.virtual_register_);
}
......@@ -143,6 +145,8 @@ void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
constraint->type_ = kConstant;
constraint->value_ = ConstantOperand::cast(op)->virtual_register();
constraint->virtual_register_ = constraint->value_;
} else if (op->IsExplicit()) {
constraint->type_ = kExplicit;
} else if (op->IsImmediate()) {
auto imm = ImmediateOperand::cast(op);
int value = imm->type() == ImmediateOperand::INLINE ? imm->inline_value()
......@@ -214,22 +218,25 @@ void RegisterAllocatorVerifier::CheckConstraint(
case kRegister:
CHECK(op->IsRegister());
return;
case kDoubleRegister:
CHECK(op->IsDoubleRegister());
return;
case kExplicit:
CHECK(op->IsExplicit());
return;
case kFixedRegister:
CHECK(op->IsRegister());
CHECK_EQ(RegisterOperand::cast(op)->GetDoubleRegister().code(),
CHECK_EQ(LocationOperand::cast(op)->GetRegister().code(),
constraint->value_);
return;
case kDoubleRegister:
CHECK(op->IsDoubleRegister());
return;
case kFixedDoubleRegister:
CHECK(op->IsDoubleRegister());
CHECK_EQ(DoubleRegisterOperand::cast(op)->GetDoubleRegister().code(),
CHECK_EQ(LocationOperand::cast(op)->GetDoubleRegister().code(),
constraint->value_);
return;
case kFixedSlot:
CHECK(op->IsStackSlot());
CHECK_EQ(StackSlotOperand::cast(op)->index(), constraint->value_);
CHECK_EQ(LocationOperand::cast(op)->index(), constraint->value_);
return;
case kSlot:
CHECK(op->IsStackSlot());
......@@ -282,7 +289,7 @@ class PhiMap : public ZoneMap<int, PhiData*>, public ZoneObject {
struct OperandLess {
bool operator()(const InstructionOperand* a,
const InstructionOperand* b) const {
return a->CompareModuloType(*b);
return a->CompareCanonicalized(*b);
}
};
......@@ -316,7 +323,7 @@ class OperandMap : public ZoneObject {
this->erase(it++);
if (it == this->end()) return;
}
if (it->first->EqualsModuloType(*o.first)) {
if (it->first->EqualsCanonicalized(*o.first)) {
++it;
if (it == this->end()) return;
} else {
......@@ -676,7 +683,10 @@ void RegisterAllocatorVerifier::VerifyGapMoves(BlockMaps* block_maps,
const auto op_constraints = instr_constraint.operand_constraints_;
size_t count = 0;
for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
if (op_constraints[count].type_ == kImmediate) continue;
if (op_constraints[count].type_ == kImmediate ||
op_constraints[count].type_ == kExplicit) {
continue;
}
int virtual_register = op_constraints[count].virtual_register_;
auto op = instr->InputAt(i);
if (!block_maps->IsPhi(virtual_register)) {
......
......@@ -35,6 +35,7 @@ class RegisterAllocatorVerifier final : public ZoneObject {
kFixedSlot,
kNone,
kNoneDouble,
kExplicit,
kSameAsFirst
};
......
......@@ -71,7 +71,7 @@ bool IsOutputRegisterOf(Instruction* instr, Register reg) {
for (size_t i = 0; i < instr->OutputCount(); i++) {
auto output = instr->OutputAt(i);
if (output->IsRegister() &&
RegisterOperand::cast(output)->GetRegister().is(reg)) {
LocationOperand::cast(output)->GetRegister().is(reg)) {
return true;
}
}
......@@ -83,7 +83,7 @@ bool IsOutputDoubleRegisterOf(Instruction* instr, DoubleRegister reg) {
for (size_t i = 0; i < instr->OutputCount(); i++) {
auto output = instr->OutputAt(i);
if (output->IsDoubleRegister() &&
DoubleRegisterOperand::cast(output)->GetDoubleRegister().is(reg)) {
LocationOperand::cast(output)->GetDoubleRegister().is(reg)) {
return true;
}
}
......@@ -161,10 +161,8 @@ bool UsePosition::HintRegister(int* register_code) const {
auto operand = reinterpret_cast<InstructionOperand*>(hint_);
int assigned_register =
operand->IsRegister()
? RegisterOperand::cast(operand)->GetRegister().code()
: DoubleRegisterOperand::cast(operand)
->GetDoubleRegister()
.code();
? LocationOperand::cast(operand)->GetRegister().code()
: LocationOperand::cast(operand)->GetDoubleRegister().code();
*register_code = assigned_register;
return true;
}
......@@ -186,17 +184,16 @@ UsePositionHintType UsePosition::HintTypeForOperand(
switch (op.kind()) {
case InstructionOperand::CONSTANT:
case InstructionOperand::IMMEDIATE:
case InstructionOperand::EXPLICIT:
return UsePositionHintType::kNone;
case InstructionOperand::UNALLOCATED:
return UsePositionHintType::kUnresolved;
case InstructionOperand::ALLOCATED:
switch (AllocatedOperand::cast(op).allocated_kind()) {
case AllocatedOperand::REGISTER:
case AllocatedOperand::DOUBLE_REGISTER:
return UsePositionHintType::kOperand;
case AllocatedOperand::STACK_SLOT:
case AllocatedOperand::DOUBLE_STACK_SLOT:
return UsePositionHintType::kNone;
if (op.IsRegister() || op.IsDoubleRegister()) {
return UsePositionHintType::kOperand;
} else {
DCHECK(op.IsStackSlot() || op.IsDoubleStackSlot());
return UsePositionHintType::kNone;
}
case InstructionOperand::INVALID:
break;
......@@ -400,12 +397,8 @@ bool LiveRange::IsTopLevel() const { return top_level_ == this; }
InstructionOperand LiveRange::GetAssignedOperand() const {
if (HasRegisterAssigned()) {
DCHECK(!spilled());
switch (kind()) {
case GENERAL_REGISTERS:
return RegisterOperand(machine_type(), assigned_register());
case DOUBLE_REGISTERS:
return DoubleRegisterOperand(machine_type(), assigned_register());
}
return AllocatedOperand(LocationOperand::REGISTER, machine_type(),
assigned_register());
}
DCHECK(spilled());
DCHECK(!HasRegisterAssigned());
......@@ -841,14 +834,7 @@ void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) {
AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const {
auto spill_range = GetSpillRange();
int index = spill_range->assigned_slot();
switch (kind()) {
case GENERAL_REGISTERS:
return StackSlotOperand(machine_type(), index);
case DOUBLE_REGISTERS:
return DoubleStackSlotOperand(machine_type(), index);
}
UNREACHABLE();
return StackSlotOperand(kMachNone, 0);
return AllocatedOperand(LocationOperand::STACK_SLOT, machine_type(), index);
}
......@@ -1532,18 +1518,17 @@ InstructionOperand* ConstraintBuilder::AllocateFixed(
machine_type = data()->MachineTypeFor(virtual_register);
}
if (operand->HasFixedSlotPolicy()) {
AllocatedOperand::AllocatedKind kind =
IsFloatingPoint(machine_type) ? AllocatedOperand::DOUBLE_STACK_SLOT
: AllocatedOperand::STACK_SLOT;
allocated =
AllocatedOperand(kind, machine_type, operand->fixed_slot_index());
allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT, machine_type,
operand->fixed_slot_index());
} else if (operand->HasFixedRegisterPolicy()) {
DCHECK(!IsFloatingPoint(machine_type));
allocated = AllocatedOperand(AllocatedOperand::REGISTER, machine_type,
operand->fixed_register_index());
} else if (operand->HasFixedDoubleRegisterPolicy()) {
DCHECK(IsFloatingPoint(machine_type));
DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register);
allocated = AllocatedOperand(AllocatedOperand::DOUBLE_REGISTER,
machine_type, operand->fixed_register_index());
allocated = AllocatedOperand(AllocatedOperand::REGISTER, machine_type,
operand->fixed_register_index());
} else {
UNREACHABLE();
}
......@@ -1594,9 +1579,9 @@ void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
AllocateFixed(output, -1, false);
// This value is produced on the stack, we never need to spill it.
if (output->IsStackSlot()) {
DCHECK(StackSlotOperand::cast(output)->index() <
DCHECK(LocationOperand::cast(output)->index() <
data()->frame()->GetSpillSlotCount());
range->SetSpillOperand(StackSlotOperand::cast(output));
range->SetSpillOperand(LocationOperand::cast(output));
range->SetSpillStartIndex(end);
assigned = true;
}
......@@ -1654,9 +1639,9 @@ void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
// This value is produced on the stack, we never need to spill it.
if (first_output->IsStackSlot()) {
DCHECK(StackSlotOperand::cast(first_output)->index() <
DCHECK(LocationOperand::cast(first_output)->index() <
data()->frame()->GetTotalFrameSlotCount());
range->SetSpillOperand(StackSlotOperand::cast(first_output));
range->SetSpillOperand(LocationOperand::cast(first_output));
range->SetSpillStartIndex(instr_index + 1);
assigned = true;
}
......@@ -1679,7 +1664,9 @@ void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
// Handle fixed input operands of second instruction.
for (size_t i = 0; i < second->InputCount(); i++) {
auto input = second->InputAt(i);
if (input->IsImmediate()) continue; // Ignore immediates.
if (input->IsImmediate() || input->IsExplicit()) {
continue; // Ignore immediates and explicitly reserved registers.
}
auto cur_input = UnallocatedOperand::cast(input);
if (cur_input->HasFixedPolicy()) {
int input_vreg = cur_input->virtual_register();
......@@ -1859,10 +1846,10 @@ TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
ConstantOperand::cast(operand)->virtual_register());
} else if (operand->IsRegister()) {
return FixedLiveRangeFor(
RegisterOperand::cast(operand)->GetRegister().code());
LocationOperand::cast(operand)->GetRegister().code());
} else if (operand->IsDoubleRegister()) {
return FixedDoubleLiveRangeFor(
DoubleRegisterOperand::cast(operand)->GetDoubleRegister().code());
LocationOperand::cast(operand)->GetDoubleRegister().code());
} else {
return nullptr;
}
......@@ -1976,7 +1963,9 @@ void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
for (size_t i = 0; i < instr->InputCount(); i++) {
auto input = instr->InputAt(i);
if (input->IsImmediate()) continue; // Ignore immediates.
if (input->IsImmediate() || input->IsExplicit()) {
continue; // Ignore immediates and explicitly reserved registers.
}
LifetimePosition use_pos;
if (input->IsUnallocated() &&
UnallocatedOperand::cast(input)->IsUsedAtStart()) {
......
......@@ -32,13 +32,17 @@ class InterpreterState {
private:
struct Key {
bool is_constant;
AllocatedOperand::AllocatedKind kind;
bool is_float;
LocationOperand::LocationKind kind;
int index;
bool operator<(const Key& other) const {
if (this->is_constant != other.is_constant) {
return this->is_constant;
}
if (this->is_float != other.is_float) {
return this->is_float;
}
if (this->kind != other.kind) {
return this->kind < other.kind;
}
......@@ -70,22 +74,24 @@ class InterpreterState {
static Key KeyFor(const InstructionOperand& op) {
bool is_constant = op.IsConstant();
AllocatedOperand::AllocatedKind kind;
bool is_float = false;
LocationOperand::LocationKind kind;
int index;
if (!is_constant) {
if (op.IsRegister()) {
index = AllocatedOperand::cast(op).GetRegister().code();
index = LocationOperand::cast(op).GetRegister().code();
} else if (op.IsDoubleRegister()) {
index = AllocatedOperand::cast(op).GetDoubleRegister().code();
index = LocationOperand::cast(op).GetDoubleRegister().code();
} else {
index = AllocatedOperand::cast(op).index();
index = LocationOperand::cast(op).index();
}
kind = AllocatedOperand::cast(op).allocated_kind();
is_float = IsFloatingPoint(LocationOperand::cast(op).machine_type());
kind = LocationOperand::cast(op).location_kind();
} else {
index = ConstantOperand::cast(op).virtual_register();
kind = AllocatedOperand::REGISTER;
kind = LocationOperand::REGISTER;
}
Key key = {is_constant, kind, index};
Key key = {is_constant, is_float, kind, index};
return key;
}
......@@ -192,18 +198,26 @@ class ParallelMoveCreator : public HandleAndZoneScope {
}
InstructionOperand CreateRandomOperand(bool is_source) {
int index = rng_->NextInt(6);
int index = rng_->NextInt(7);
// destination can't be Constant.
switch (rng_->NextInt(is_source ? 5 : 4)) {
switch (rng_->NextInt(is_source ? 7 : 6)) {
case 0:
return StackSlotOperand(RandomType(), index);
return AllocatedOperand(LocationOperand::STACK_SLOT, RandomType(),
index);
case 1:
return DoubleStackSlotOperand(RandomDoubleType(), index);
return AllocatedOperand(LocationOperand::STACK_SLOT, RandomDoubleType(),
index);
case 2:
return RegisterOperand(RandomType(), index);
return AllocatedOperand(LocationOperand::REGISTER, RandomType(), index);
case 3:
return DoubleRegisterOperand(RandomDoubleType(), index);
return AllocatedOperand(LocationOperand::REGISTER, RandomDoubleType(),
index);
case 4:
return ExplicitOperand(LocationOperand::REGISTER, RandomType(), 1);
case 5:
return ExplicitOperand(LocationOperand::STACK_SLOT, RandomType(),
index);
case 6:
return ConstantOperand(index);
}
UNREACHABLE();
......
......@@ -59,14 +59,16 @@ class TestCode : public HandleAndZoneScope {
Start();
sequence_.AddInstruction(Instruction::New(main_zone(), kArchNop));
int index = static_cast<int>(sequence_.instructions().size()) - 1;
AddGapMove(index, RegisterOperand(kRepWord32, 13),
RegisterOperand(kRepWord32, 13));
AddGapMove(index,
AllocatedOperand(LocationOperand::REGISTER, kRepWord32, 13),
AllocatedOperand(LocationOperand::REGISTER, kRepWord32, 13));
}
void NonRedundantMoves() {
Start();
sequence_.AddInstruction(Instruction::New(main_zone(), kArchNop));
int index = static_cast<int>(sequence_.instructions().size()) - 1;
AddGapMove(index, ConstantOperand(11), RegisterOperand(kRepWord32, 11));
AddGapMove(index, ConstantOperand(11),
AllocatedOperand(LocationOperand::REGISTER, kRepWord32, 11));
}
void Other() {
Start();
......
......@@ -36,6 +36,7 @@ class InstructionSequenceTest : public TestWithIsolateAndZone {
kFixedRegister,
kSlot,
kFixedSlot,
kExplicit,
kImmediate,
kNone,
kConstant,
......@@ -57,6 +58,11 @@ class InstructionSequenceTest : public TestWithIsolateAndZone {
static TestOperand Same() { return TestOperand(kSameAsFirst, VReg()); }
static TestOperand ExplicitReg(int index) {
TestOperandType type = kExplicit;
return TestOperand(type, VReg(), index);
}
static TestOperand Reg(VReg vreg, int index = kNoValue) {
TestOperandType type = kRegister;
if (index != kNoValue) type = kFixedRegister;
......
......@@ -67,10 +67,16 @@ class MoveOptimizerTest : public InstructionSequenceTest {
case kConstant:
return ConstantOperand(op.value_);
case kFixedSlot:
return StackSlotOperand(kRepWord32, op.value_);
return AllocatedOperand(LocationOperand::STACK_SLOT, kRepWord32,
op.value_);
case kFixedRegister:
CHECK(0 <= op.value_ && op.value_ < num_general_registers());
return RegisterOperand(kRepWord32, op.value_);
return AllocatedOperand(LocationOperand::REGISTER, kRepWord32,
op.value_);
case kExplicit:
CHECK(0 <= op.value_ && op.value_ < num_general_registers());
return ExplicitOperand(LocationOperand::REGISTER, kRepWord32,
op.value_);
default:
break;
}
......@@ -97,6 +103,23 @@ TEST_F(MoveOptimizerTest, RemovesRedundant) {
}
TEST_F(MoveOptimizerTest, RemovesRedundantExplicit) {
StartBlock();
auto first_instr = EmitNop();
AddMove(first_instr, Reg(0), ExplicitReg(1));
auto last_instr = EmitNop();
AddMove(last_instr, Reg(1), Reg(0));
EndBlock(Last());
Optimize();
CHECK_EQ(0, NonRedundantSize(first_instr->parallel_moves()[0]));
auto move = last_instr->parallel_moves()[0];
CHECK_EQ(1, NonRedundantSize(move));
CHECK(Contains(move, Reg(0), ExplicitReg(1)));
}
TEST_F(MoveOptimizerTest, SplitsConstants) {
StartBlock();
EndBlock(Last());
......