Commit f1aa5562 authored by danno, committed by Commit bot

[turbofan] Create ExplicitOperands to specify operands without virtual registers

Until now, if one wanted to specify an explicit stack location or register
as an operand for an instruction, it also had to be explicitly associated
with a virtual register as a so-called FixedRegister or FixedStackSlot.

For the implementation of tail calls, the plan is to use the gap
resolver to shuffle stack locations from the caller to the
tail-called callee. In order to do this, it must be possible to
explicitly address operand locations on the stack that are not
associated with virtual registers.

This CL introduces ExplicitOperands, which can specify a specific
register or stack location that is not associated with a virtual
register. This will allow tail calls to specify the target
locations for the necessary stack moves in the gap for the tail
call without the core register allocation having to know about
the target of the stack moves at all.
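
As a rough sketch of the new operand kind (the machine types, slot index
and register code below are illustrative, not taken from this CL), an
ExplicitOperand is built directly from a location kind, a machine type
and an index:

  // A specific caller stack slot, addressed without any virtual register.
  ExplicitOperand stack_arg(LocationOperand::STACK_SLOT, kRepTagged, 2);

  // A specific general-purpose register, named by its code (the code must
  // denote an allocatable register).
  ExplicitOperand fixed_reg(LocationOperand::REGISTER, kRepWord32, 3);

The register allocator never sees a virtual register for either operand;
only the gap resolver has to move values into the locations they name.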

In the process this CL:
* creates a new Operand kind, ExplicitOperand, with which
  instructions can specify registers and stack slots without an
  associated virtual register.
* creates a LocationOperand class, from which AllocatedOperand and
  ExplicitOperand are derived, that provides a common interface to
  get Register, DoubleRegister and spill slot information (see the
  sketch after this list).
* removes RegisterOperand, DoubleRegisterOperand,
  StackSlotOperand and DoubleStackSlotOperand, which are subsumed
  by LocationOperand.
* addresses a cleanup TODO in AllocatedOperand to reduce the
  redundancy of AllocatedOperand::Kind by using machine_type() to
  determine if an operand corresponds to a general purpose or
  double register.
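
A minimal sketch of the common LocationOperand interface referenced
above (assuming op is an InstructionOperand* that is either allocated or
explicit):

  LocationOperand* loc = LocationOperand::cast(op);
  if (op->IsRegister()) {
    Register reg = loc->GetRegister();            // general-purpose register
  } else if (op->IsDoubleRegister()) {
    DoubleRegister dreg = loc->GetDoubleRegister();
  } else {
    int slot = loc->index();                      // stack/spill slot index
  }
  // General-purpose vs. double is no longer a separate operand kind; it is
  // derived from the operand's machine type.
  bool is_double = IsFloatingPoint(loc->machine_type());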

BUG=v8:4076
LOG=n

Review URL: https://codereview.chromium.org/1389373002

Cr-Commit-Position: refs/heads/master@{#31603}
parent aadeef9e
......@@ -100,11 +100,11 @@ class InstructionOperandConverter {
}
Register ToRegister(InstructionOperand* op) {
return RegisterOperand::cast(op)->GetRegister();
return LocationOperand::cast(op)->GetRegister();
}
DoubleRegister ToDoubleRegister(InstructionOperand* op) {
return DoubleRegisterOperand::cast(op)->GetDoubleRegister();
return LocationOperand::cast(op)->GetDoubleRegister();
}
Constant ToConstant(InstructionOperand* op) {
......
......@@ -220,14 +220,14 @@ void CodeGenerator::RecordSafepoint(ReferenceMap* references,
frame()->GetTotalFrameSlotCount() - frame()->GetSpillSlotCount();
for (auto& operand : references->reference_operands()) {
if (operand.IsStackSlot()) {
int index = StackSlotOperand::cast(operand).index();
int index = LocationOperand::cast(operand).index();
DCHECK(index >= 0);
// Safepoint table indices are 0-based from the beginning of the spill
// slot area, adjust appropriately.
index -= stackSlotToSpillSlotDelta;
safepoint.DefinePointerSlot(index, zone());
} else if (operand.IsRegister() && (kind & Safepoint::kWithRegisters)) {
Register reg = RegisterOperand::cast(operand).GetRegister();
Register reg = LocationOperand::cast(operand).GetRegister();
safepoint.DefinePointerRegister(reg, zone());
}
}
......@@ -589,21 +589,20 @@ void CodeGenerator::AddTranslationForOperand(Translation* translation,
MachineType type) {
if (op->IsStackSlot()) {
if (type == kMachBool || type == kRepBit) {
translation->StoreBoolStackSlot(StackSlotOperand::cast(op)->index());
translation->StoreBoolStackSlot(LocationOperand::cast(op)->index());
} else if (type == kMachInt32 || type == kMachInt8 || type == kMachInt16) {
translation->StoreInt32StackSlot(StackSlotOperand::cast(op)->index());
translation->StoreInt32StackSlot(LocationOperand::cast(op)->index());
} else if (type == kMachUint32 || type == kMachUint16 ||
type == kMachUint8) {
translation->StoreUint32StackSlot(StackSlotOperand::cast(op)->index());
translation->StoreUint32StackSlot(LocationOperand::cast(op)->index());
} else if ((type & kRepMask) == kRepTagged) {
translation->StoreStackSlot(StackSlotOperand::cast(op)->index());
translation->StoreStackSlot(LocationOperand::cast(op)->index());
} else {
CHECK(false);
}
} else if (op->IsDoubleStackSlot()) {
DCHECK((type & (kRepFloat32 | kRepFloat64)) != 0);
translation->StoreDoubleStackSlot(
DoubleStackSlotOperand::cast(op)->index());
translation->StoreDoubleStackSlot(LocationOperand::cast(op)->index());
} else if (op->IsRegister()) {
InstructionOperandConverter converter(this, instr);
if (type == kMachBool || type == kRepBit) {
......
......@@ -75,7 +75,7 @@ void GapResolver::PerformMove(ParallelMove* moves, MoveOperands* move) const {
// This move's source may have changed due to swaps to resolve cycles and so
// it may now be the last move in the cycle. If so remove it.
InstructionOperand source = move->source();
if (source.EqualsModuloType(destination)) {
if (source.EqualsCanonicalized(destination)) {
move->Eliminate();
return;
}
......
......@@ -120,6 +120,11 @@ class OperandGenerator {
reg.code(), GetVReg(node)));
}
InstructionOperand UseExplicit(Register reg) {
MachineType machine_type = InstructionSequence::DefaultRepresentation();
return ExplicitOperand(LocationOperand::REGISTER, machine_type, reg.code());
}
InstructionOperand UseImmediate(Node* node) {
return sequence()->AddImmediate(ToConstant(node));
}
......
......@@ -105,24 +105,22 @@ std::ostream& operator<<(std::ostream& os,
return os << "[immediate:" << imm.indexed_value() << "]";
}
}
case InstructionOperand::EXPLICIT:
case InstructionOperand::ALLOCATED: {
auto allocated = AllocatedOperand::cast(op);
switch (allocated.allocated_kind()) {
case AllocatedOperand::STACK_SLOT:
os << "[stack:" << StackSlotOperand::cast(op).index();
break;
case AllocatedOperand::DOUBLE_STACK_SLOT:
os << "[double_stack:" << DoubleStackSlotOperand::cast(op).index();
break;
case AllocatedOperand::REGISTER:
os << "[" << RegisterOperand::cast(op).GetRegister().ToString()
<< "|R";
break;
case AllocatedOperand::DOUBLE_REGISTER:
os << "["
<< DoubleRegisterOperand::cast(op).GetDoubleRegister().ToString()
<< "|R";
break;
auto allocated = LocationOperand::cast(op);
if (op.IsStackSlot()) {
os << "[stack:" << LocationOperand::cast(op).index();
} else if (op.IsDoubleStackSlot()) {
os << "[double_stack:" << LocationOperand::cast(op).index();
} else if (op.IsRegister()) {
os << "[" << LocationOperand::cast(op).GetRegister().ToString() << "|R";
} else {
DCHECK(op.IsDoubleRegister());
os << "[" << LocationOperand::cast(op).GetDoubleRegister().ToString()
<< "|R";
}
if (allocated.IsExplicit()) {
os << "|E";
}
switch (allocated.machine_type()) {
case kRepWord32:
......@@ -181,11 +179,11 @@ MoveOperands* ParallelMove::PrepareInsertAfter(MoveOperands* move) const {
MoveOperands* to_eliminate = nullptr;
for (auto curr : *this) {
if (curr->IsEliminated()) continue;
if (curr->destination().EqualsModuloType(move->source())) {
if (curr->destination().EqualsCanonicalized(move->source())) {
DCHECK(!replacement);
replacement = curr;
if (to_eliminate != nullptr) break;
} else if (curr->destination().EqualsModuloType(move->destination())) {
} else if (curr->destination().EqualsCanonicalized(move->destination())) {
DCHECK(!to_eliminate);
to_eliminate = curr;
if (replacement != nullptr) break;
......@@ -197,6 +195,16 @@ MoveOperands* ParallelMove::PrepareInsertAfter(MoveOperands* move) const {
}
ExplicitOperand::ExplicitOperand(LocationKind kind, MachineType machine_type,
int index)
: LocationOperand(EXPLICIT, kind, machine_type, index) {
DCHECK_IMPLIES(kind == REGISTER && !IsFloatingPoint(machine_type),
Register::from_code(index).IsAllocatable());
DCHECK_IMPLIES(kind == REGISTER && IsFloatingPoint(machine_type),
DoubleRegister::from_code(index).IsAllocatable());
}
Instruction::Instruction(InstructionCode opcode)
: opcode_(opcode),
bit_field_(OutputCountField::encode(0) | InputCountField::encode(0) |
......@@ -263,7 +271,7 @@ std::ostream& operator<<(std::ostream& os,
void ReferenceMap::RecordReference(const AllocatedOperand& op) {
// Do not record arguments as pointers.
if (op.IsStackSlot() && StackSlotOperand::cast(op).index() < 0) return;
if (op.IsStackSlot() && LocationOperand::cast(op).index() < 0) return;
DCHECK(!op.IsDoubleRegister() && !op.IsDoubleStackSlot());
reference_operands_.push_back(op);
}
......
......@@ -31,7 +31,7 @@ class InstructionOperand {
// TODO(dcarney): recover bit. INVALID can be represented as UNALLOCATED with
// kInvalidVirtualRegister and some DCHECKS.
enum Kind { INVALID, UNALLOCATED, CONSTANT, IMMEDIATE, ALLOCATED };
enum Kind { INVALID, UNALLOCATED, CONSTANT, IMMEDIATE, EXPLICIT, ALLOCATED };
InstructionOperand() : InstructionOperand(INVALID) {}
......@@ -40,9 +40,25 @@ class InstructionOperand {
#define INSTRUCTION_OPERAND_PREDICATE(name, type) \
bool Is##name() const { return kind() == type; }
INSTRUCTION_OPERAND_PREDICATE(Invalid, INVALID)
// UnallocatedOperands are place-holder operands created before register
// allocation. They later are assigned registers and become AllocatedOperands.
INSTRUCTION_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
// Constant operands participate in register allocation. They are allocated to
// registers but have a special "spilling" behavior. When a ConstantOperand
// value must be rematerialized, it is loaded from an immediate constant
// rather from an unspilled slot.
INSTRUCTION_OPERAND_PREDICATE(Constant, CONSTANT)
// ImmediateOperands do not participate in register allocation and are only
// embedded directly in instructions, e.g. small integers and on some
// platforms Objects.
INSTRUCTION_OPERAND_PREDICATE(Immediate, IMMEDIATE)
// ExplicitOperands do not participate in register allocation. They are
// created by the instruction selector for direct access to registers and
// stack slots, completely bypassing the register allocator. They are never
// associated with a virtual register
INSTRUCTION_OPERAND_PREDICATE(Explicit, EXPLICIT)
// AllocatedOperands are registers or stack slots that are assigned by the
// register allocator and are always associated with a virtual register.
INSTRUCTION_OPERAND_PREDICATE(Allocated, ALLOCATED)
#undef INSTRUCTION_OPERAND_PREDICATE
......@@ -70,18 +86,18 @@ class InstructionOperand {
return this->value_ < that.value_;
}
bool EqualsModuloType(const InstructionOperand& that) const {
return this->GetValueModuloType() == that.GetValueModuloType();
bool EqualsCanonicalized(const InstructionOperand& that) const {
return this->GetCanonicalizedValue() == that.GetCanonicalizedValue();
}
bool CompareModuloType(const InstructionOperand& that) const {
return this->GetValueModuloType() < that.GetValueModuloType();
bool CompareCanonicalized(const InstructionOperand& that) const {
return this->GetCanonicalizedValue() < that.GetCanonicalizedValue();
}
protected:
explicit InstructionOperand(Kind kind) : value_(KindField::encode(kind)) {}
inline uint64_t GetValueModuloType() const;
inline uint64_t GetCanonicalizedValue() const;
class KindField : public BitField64<Kind, 0, 3> {};
......@@ -353,56 +369,44 @@ class ImmediateOperand : public InstructionOperand {
};
class AllocatedOperand : public InstructionOperand {
class LocationOperand : public InstructionOperand {
public:
// TODO(dcarney): machine_type makes this now redundant. Just need to know is
// the operand is a slot or a register.
enum AllocatedKind {
STACK_SLOT,
DOUBLE_STACK_SLOT,
REGISTER,
DOUBLE_REGISTER
};
enum LocationKind { REGISTER, STACK_SLOT };
AllocatedOperand(AllocatedKind kind, MachineType machine_type, int index)
: InstructionOperand(ALLOCATED) {
DCHECK_IMPLIES(kind == REGISTER || kind == DOUBLE_REGISTER, index >= 0);
LocationOperand(InstructionOperand::Kind operand_kind,
LocationOperand::LocationKind location_kind,
MachineType machine_type, int index)
: InstructionOperand(operand_kind) {
DCHECK_IMPLIES(location_kind == REGISTER, index >= 0);
DCHECK(IsSupportedMachineType(machine_type));
value_ |= AllocatedKindField::encode(kind);
value_ |= LocationKindField::encode(location_kind);
value_ |= MachineTypeField::encode(machine_type);
value_ |= static_cast<int64_t>(index) << IndexField::kShift;
}
int index() const {
DCHECK(STACK_SLOT == allocated_kind() ||
DOUBLE_STACK_SLOT == allocated_kind());
DCHECK(IsStackSlot() || IsDoubleStackSlot());
return static_cast<int64_t>(value_) >> IndexField::kShift;
}
Register GetRegister() const {
DCHECK(REGISTER == allocated_kind() || DOUBLE_REGISTER == allocated_kind());
DCHECK(IsRegister());
return Register::from_code(static_cast<int64_t>(value_) >>
IndexField::kShift);
}
DoubleRegister GetDoubleRegister() const {
DCHECK(REGISTER == allocated_kind() || DOUBLE_REGISTER == allocated_kind());
DCHECK(IsDoubleRegister());
return DoubleRegister::from_code(static_cast<int64_t>(value_) >>
IndexField::kShift);
}
AllocatedKind allocated_kind() const {
return AllocatedKindField::decode(value_);
LocationKind location_kind() const {
return LocationKindField::decode(value_);
}
MachineType machine_type() const { return MachineTypeField::decode(value_); }
static AllocatedOperand* New(Zone* zone, AllocatedKind kind,
MachineType machine_type, int index) {
return InstructionOperand::New(zone,
AllocatedOperand(kind, machine_type, index));
}
static bool IsSupportedMachineType(MachineType machine_type) {
if (RepresentationOf(machine_type) != machine_type) return false;
switch (machine_type) {
......@@ -417,71 +421,99 @@ class AllocatedOperand : public InstructionOperand {
}
}
INSTRUCTION_OPERAND_CASTS(AllocatedOperand, ALLOCATED);
static LocationOperand* cast(InstructionOperand* op) {
DCHECK(ALLOCATED == op->kind() || EXPLICIT == op->kind());
return static_cast<LocationOperand*>(op);
}
static const LocationOperand* cast(const InstructionOperand* op) {
DCHECK(ALLOCATED == op->kind() || EXPLICIT == op->kind());
return static_cast<const LocationOperand*>(op);
}
static LocationOperand cast(const InstructionOperand& op) {
DCHECK(ALLOCATED == op.kind() || EXPLICIT == op.kind());
return *static_cast<const LocationOperand*>(&op);
}
STATIC_ASSERT(KindField::kSize == 3);
class AllocatedKindField : public BitField64<AllocatedKind, 3, 2> {};
class LocationKindField : public BitField64<LocationKind, 3, 2> {};
class MachineTypeField : public BitField64<MachineType, 5, 16> {};
class IndexField : public BitField64<int32_t, 35, 29> {};
};
class ExplicitOperand : public LocationOperand {
public:
ExplicitOperand(LocationKind kind, MachineType machine_type, int index);
static ExplicitOperand* New(Zone* zone, LocationKind kind,
MachineType machine_type, int index) {
return InstructionOperand::New(zone,
ExplicitOperand(kind, machine_type, index));
}
INSTRUCTION_OPERAND_CASTS(ExplicitOperand, EXPLICIT);
};
class AllocatedOperand : public LocationOperand {
public:
AllocatedOperand(LocationKind kind, MachineType machine_type, int index)
: LocationOperand(ALLOCATED, kind, machine_type, index) {}
static AllocatedOperand* New(Zone* zone, LocationKind kind,
MachineType machine_type, int index) {
return InstructionOperand::New(zone,
AllocatedOperand(kind, machine_type, index));
}
INSTRUCTION_OPERAND_CASTS(AllocatedOperand, ALLOCATED);
};
#undef INSTRUCTION_OPERAND_CASTS
#define ALLOCATED_OPERAND_LIST(V) \
V(StackSlot, STACK_SLOT) \
V(DoubleStackSlot, DOUBLE_STACK_SLOT) \
V(Register, REGISTER) \
V(DoubleRegister, DOUBLE_REGISTER)
#define ALLOCATED_OPERAND_IS(SubKind, kOperandKind) \
bool InstructionOperand::Is##SubKind() const { \
return IsAllocated() && \
AllocatedOperand::cast(this)->allocated_kind() == \
AllocatedOperand::kOperandKind; \
}
ALLOCATED_OPERAND_LIST(ALLOCATED_OPERAND_IS)
#undef ALLOCATED_OPERAND_IS
// TODO(dcarney): these subkinds are now pretty useless, nuke.
#define ALLOCATED_OPERAND_CLASS(SubKind, kOperandKind) \
class SubKind##Operand final : public AllocatedOperand { \
public: \
explicit SubKind##Operand(MachineType machine_type, int index) \
: AllocatedOperand(kOperandKind, machine_type, index) {} \
\
static SubKind##Operand* New(Zone* zone, MachineType machine_type, \
int index) { \
return InstructionOperand::New(zone, \
SubKind##Operand(machine_type, index)); \
} \
\
static SubKind##Operand* cast(InstructionOperand* op) { \
DCHECK_EQ(kOperandKind, AllocatedOperand::cast(op)->allocated_kind()); \
return reinterpret_cast<SubKind##Operand*>(op); \
} \
\
static const SubKind##Operand* cast(const InstructionOperand* op) { \
DCHECK_EQ(kOperandKind, AllocatedOperand::cast(op)->allocated_kind()); \
return reinterpret_cast<const SubKind##Operand*>(op); \
} \
\
static SubKind##Operand cast(const InstructionOperand& op) { \
DCHECK_EQ(kOperandKind, AllocatedOperand::cast(op).allocated_kind()); \
return *static_cast<const SubKind##Operand*>(&op); \
} \
};
ALLOCATED_OPERAND_LIST(ALLOCATED_OPERAND_CLASS)
#undef ALLOCATED_OPERAND_CLASS
bool InstructionOperand::IsRegister() const {
return (IsAllocated() || IsExplicit()) &&
LocationOperand::cast(this)->location_kind() ==
LocationOperand::REGISTER &&
!IsFloatingPoint(LocationOperand::cast(this)->machine_type());
}
bool InstructionOperand::IsDoubleRegister() const {
return (IsAllocated() || IsExplicit()) &&
LocationOperand::cast(this)->location_kind() ==
LocationOperand::REGISTER &&
IsFloatingPoint(LocationOperand::cast(this)->machine_type());
}
bool InstructionOperand::IsStackSlot() const {
return (IsAllocated() || IsExplicit()) &&
LocationOperand::cast(this)->location_kind() ==
LocationOperand::STACK_SLOT &&
!IsFloatingPoint(LocationOperand::cast(this)->machine_type());
}
bool InstructionOperand::IsDoubleStackSlot() const {
return (IsAllocated() || IsExplicit()) &&
LocationOperand::cast(this)->location_kind() ==
LocationOperand::STACK_SLOT &&
IsFloatingPoint(LocationOperand::cast(this)->machine_type());
}
uint64_t InstructionOperand::GetValueModuloType() const {
if (IsAllocated()) {
uint64_t InstructionOperand::GetCanonicalizedValue() const {
if (IsAllocated() || IsExplicit()) {
// TODO(dcarney): put machine type last and mask.
return AllocatedOperand::MachineTypeField::update(this->value_, kMachNone);
MachineType canonicalized_machine_type =
IsFloatingPoint(LocationOperand::cast(this)->machine_type())
? kMachFloat64
: kMachNone;
return InstructionOperand::KindField::update(
LocationOperand::MachineTypeField::update(this->value_,
canonicalized_machine_type),
LocationOperand::EXPLICIT);
}
return this->value_;
}
......@@ -491,7 +523,7 @@ uint64_t InstructionOperand::GetValueModuloType() const {
struct CompareOperandModuloType {
bool operator()(const InstructionOperand& a,
const InstructionOperand& b) const {
return a.CompareModuloType(b);
return a.CompareCanonicalized(b);
}
};
......@@ -523,14 +555,14 @@ class MoveOperands final : public ZoneObject {
// True if this move a move into the given destination operand.
bool Blocks(const InstructionOperand& operand) const {
return !IsEliminated() && source().EqualsModuloType(operand);
return !IsEliminated() && source().EqualsCanonicalized(operand);
}
// A move is redundant if it's been eliminated or if its source and
// destination are the same.
bool IsRedundant() const {
DCHECK_IMPLIES(!destination_.IsInvalid(), !destination_.IsConstant());
return IsEliminated() || source_.EqualsModuloType(destination_);
return IsEliminated() || source_.EqualsCanonicalized(destination_);
}
// We clear both operands to indicate move that's been eliminated.
......
......@@ -14,10 +14,10 @@ typedef std::pair<InstructionOperand, InstructionOperand> MoveKey;
struct MoveKeyCompare {
bool operator()(const MoveKey& a, const MoveKey& b) const {
if (a.first.EqualsModuloType(b.first)) {
return a.second.CompareModuloType(b.second);
if (a.first.EqualsCanonicalized(b.first)) {
return a.second.CompareCanonicalized(b.second);
}
return a.first.CompareModuloType(b.first);
return a.first.CompareCanonicalized(b.first);
}
};
......@@ -245,12 +245,12 @@ bool IsSlot(const InstructionOperand& op) {
bool LoadCompare(const MoveOperands* a, const MoveOperands* b) {
if (!a->source().EqualsModuloType(b->source())) {
return a->source().CompareModuloType(b->source());
if (!a->source().EqualsCanonicalized(b->source())) {
return a->source().CompareCanonicalized(b->source());
}
if (IsSlot(a->destination()) && !IsSlot(b->destination())) return false;
if (!IsSlot(a->destination()) && IsSlot(b->destination())) return true;
return a->destination().CompareModuloType(b->destination());
return a->destination().CompareCanonicalized(b->destination());
}
} // namespace
......@@ -276,7 +276,7 @@ void MoveOptimizer::FinalizeMoves(Instruction* instr) {
for (auto load : loads) {
// New group.
if (group_begin == nullptr ||
!load->source().EqualsModuloType(group_begin->source())) {
!load->source().EqualsCanonicalized(group_begin->source())) {
group_begin = load;
continue;
}
......
......@@ -48,7 +48,7 @@ void VerifyAllocatedGaps(const Instruction* instr) {
void RegisterAllocatorVerifier::VerifyInput(
const OperandConstraint& constraint) {
CHECK_NE(kSameAsFirst, constraint.type_);
if (constraint.type_ != kImmediate) {
if (constraint.type_ != kImmediate && constraint.type_ != kExplicit) {
CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
constraint.virtual_register_);
}
......@@ -59,6 +59,7 @@ void RegisterAllocatorVerifier::VerifyTemp(
const OperandConstraint& constraint) {
CHECK_NE(kSameAsFirst, constraint.type_);
CHECK_NE(kImmediate, constraint.type_);
CHECK_NE(kExplicit, constraint.type_);
CHECK_NE(kConstant, constraint.type_);
}
......@@ -66,6 +67,7 @@ void RegisterAllocatorVerifier::VerifyTemp(
void RegisterAllocatorVerifier::VerifyOutput(
const OperandConstraint& constraint) {
CHECK_NE(kImmediate, constraint.type_);
CHECK_NE(kExplicit, constraint.type_);
CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
constraint.virtual_register_);
}
......@@ -143,6 +145,8 @@ void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
constraint->type_ = kConstant;
constraint->value_ = ConstantOperand::cast(op)->virtual_register();
constraint->virtual_register_ = constraint->value_;
} else if (op->IsExplicit()) {
constraint->type_ = kExplicit;
} else if (op->IsImmediate()) {
auto imm = ImmediateOperand::cast(op);
int value = imm->type() == ImmediateOperand::INLINE ? imm->inline_value()
......@@ -214,22 +218,25 @@ void RegisterAllocatorVerifier::CheckConstraint(
case kRegister:
CHECK(op->IsRegister());
return;
case kDoubleRegister:
CHECK(op->IsDoubleRegister());
return;
case kExplicit:
CHECK(op->IsExplicit());
return;
case kFixedRegister:
CHECK(op->IsRegister());
CHECK_EQ(RegisterOperand::cast(op)->GetDoubleRegister().code(),
CHECK_EQ(LocationOperand::cast(op)->GetRegister().code(),
constraint->value_);
return;
case kDoubleRegister:
CHECK(op->IsDoubleRegister());
return;
case kFixedDoubleRegister:
CHECK(op->IsDoubleRegister());
CHECK_EQ(DoubleRegisterOperand::cast(op)->GetDoubleRegister().code(),
CHECK_EQ(LocationOperand::cast(op)->GetDoubleRegister().code(),
constraint->value_);
return;
case kFixedSlot:
CHECK(op->IsStackSlot());
CHECK_EQ(StackSlotOperand::cast(op)->index(), constraint->value_);
CHECK_EQ(LocationOperand::cast(op)->index(), constraint->value_);
return;
case kSlot:
CHECK(op->IsStackSlot());
......@@ -282,7 +289,7 @@ class PhiMap : public ZoneMap<int, PhiData*>, public ZoneObject {
struct OperandLess {
bool operator()(const InstructionOperand* a,
const InstructionOperand* b) const {
return a->CompareModuloType(*b);
return a->CompareCanonicalized(*b);
}
};
......@@ -316,7 +323,7 @@ class OperandMap : public ZoneObject {
this->erase(it++);
if (it == this->end()) return;
}
if (it->first->EqualsModuloType(*o.first)) {
if (it->first->EqualsCanonicalized(*o.first)) {
++it;
if (it == this->end()) return;
} else {
......@@ -676,7 +683,10 @@ void RegisterAllocatorVerifier::VerifyGapMoves(BlockMaps* block_maps,
const auto op_constraints = instr_constraint.operand_constraints_;
size_t count = 0;
for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
if (op_constraints[count].type_ == kImmediate) continue;
if (op_constraints[count].type_ == kImmediate ||
op_constraints[count].type_ == kExplicit) {
continue;
}
int virtual_register = op_constraints[count].virtual_register_;
auto op = instr->InputAt(i);
if (!block_maps->IsPhi(virtual_register)) {
......
......@@ -35,6 +35,7 @@ class RegisterAllocatorVerifier final : public ZoneObject {
kFixedSlot,
kNone,
kNoneDouble,
kExplicit,
kSameAsFirst
};
......
......@@ -71,7 +71,7 @@ bool IsOutputRegisterOf(Instruction* instr, Register reg) {
for (size_t i = 0; i < instr->OutputCount(); i++) {
auto output = instr->OutputAt(i);
if (output->IsRegister() &&
RegisterOperand::cast(output)->GetRegister().is(reg)) {
LocationOperand::cast(output)->GetRegister().is(reg)) {
return true;
}
}
......@@ -83,7 +83,7 @@ bool IsOutputDoubleRegisterOf(Instruction* instr, DoubleRegister reg) {
for (size_t i = 0; i < instr->OutputCount(); i++) {
auto output = instr->OutputAt(i);
if (output->IsDoubleRegister() &&
DoubleRegisterOperand::cast(output)->GetDoubleRegister().is(reg)) {
LocationOperand::cast(output)->GetDoubleRegister().is(reg)) {
return true;
}
}
......@@ -161,10 +161,8 @@ bool UsePosition::HintRegister(int* register_code) const {
auto operand = reinterpret_cast<InstructionOperand*>(hint_);
int assigned_register =
operand->IsRegister()
? RegisterOperand::cast(operand)->GetRegister().code()
: DoubleRegisterOperand::cast(operand)
->GetDoubleRegister()
.code();
? LocationOperand::cast(operand)->GetRegister().code()
: LocationOperand::cast(operand)->GetDoubleRegister().code();
*register_code = assigned_register;
return true;
}
......@@ -186,17 +184,16 @@ UsePositionHintType UsePosition::HintTypeForOperand(
switch (op.kind()) {
case InstructionOperand::CONSTANT:
case InstructionOperand::IMMEDIATE:
case InstructionOperand::EXPLICIT:
return UsePositionHintType::kNone;
case InstructionOperand::UNALLOCATED:
return UsePositionHintType::kUnresolved;
case InstructionOperand::ALLOCATED:
switch (AllocatedOperand::cast(op).allocated_kind()) {
case AllocatedOperand::REGISTER:
case AllocatedOperand::DOUBLE_REGISTER:
return UsePositionHintType::kOperand;
case AllocatedOperand::STACK_SLOT:
case AllocatedOperand::DOUBLE_STACK_SLOT:
return UsePositionHintType::kNone;
if (op.IsRegister() || op.IsDoubleRegister()) {
return UsePositionHintType::kOperand;
} else {
DCHECK(op.IsStackSlot() || op.IsDoubleStackSlot());
return UsePositionHintType::kNone;
}
case InstructionOperand::INVALID:
break;
......@@ -400,12 +397,8 @@ bool LiveRange::IsTopLevel() const { return top_level_ == this; }
InstructionOperand LiveRange::GetAssignedOperand() const {
if (HasRegisterAssigned()) {
DCHECK(!spilled());
switch (kind()) {
case GENERAL_REGISTERS:
return RegisterOperand(machine_type(), assigned_register());
case DOUBLE_REGISTERS:
return DoubleRegisterOperand(machine_type(), assigned_register());
}
return AllocatedOperand(LocationOperand::REGISTER, machine_type(),
assigned_register());
}
DCHECK(spilled());
DCHECK(!HasRegisterAssigned());
......@@ -841,14 +834,7 @@ void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) {
AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const {
auto spill_range = GetSpillRange();
int index = spill_range->assigned_slot();
switch (kind()) {
case GENERAL_REGISTERS:
return StackSlotOperand(machine_type(), index);
case DOUBLE_REGISTERS:
return DoubleStackSlotOperand(machine_type(), index);
}
UNREACHABLE();
return StackSlotOperand(kMachNone, 0);
return AllocatedOperand(LocationOperand::STACK_SLOT, machine_type(), index);
}
......@@ -1532,18 +1518,17 @@ InstructionOperand* ConstraintBuilder::AllocateFixed(
machine_type = data()->MachineTypeFor(virtual_register);
}
if (operand->HasFixedSlotPolicy()) {
AllocatedOperand::AllocatedKind kind =
IsFloatingPoint(machine_type) ? AllocatedOperand::DOUBLE_STACK_SLOT
: AllocatedOperand::STACK_SLOT;
allocated =
AllocatedOperand(kind, machine_type, operand->fixed_slot_index());
allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT, machine_type,
operand->fixed_slot_index());
} else if (operand->HasFixedRegisterPolicy()) {
DCHECK(!IsFloatingPoint(machine_type));
allocated = AllocatedOperand(AllocatedOperand::REGISTER, machine_type,
operand->fixed_register_index());
} else if (operand->HasFixedDoubleRegisterPolicy()) {
DCHECK(IsFloatingPoint(machine_type));
DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register);
allocated = AllocatedOperand(AllocatedOperand::DOUBLE_REGISTER,
machine_type, operand->fixed_register_index());
allocated = AllocatedOperand(AllocatedOperand::REGISTER, machine_type,
operand->fixed_register_index());
} else {
UNREACHABLE();
}
......@@ -1594,9 +1579,9 @@ void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
AllocateFixed(output, -1, false);
// This value is produced on the stack, we never need to spill it.
if (output->IsStackSlot()) {
DCHECK(StackSlotOperand::cast(output)->index() <
DCHECK(LocationOperand::cast(output)->index() <
data()->frame()->GetSpillSlotCount());
range->SetSpillOperand(StackSlotOperand::cast(output));
range->SetSpillOperand(LocationOperand::cast(output));
range->SetSpillStartIndex(end);
assigned = true;
}
......@@ -1654,9 +1639,9 @@ void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
// This value is produced on the stack, we never need to spill it.
if (first_output->IsStackSlot()) {
DCHECK(StackSlotOperand::cast(first_output)->index() <
DCHECK(LocationOperand::cast(first_output)->index() <
data()->frame()->GetTotalFrameSlotCount());
range->SetSpillOperand(StackSlotOperand::cast(first_output));
range->SetSpillOperand(LocationOperand::cast(first_output));
range->SetSpillStartIndex(instr_index + 1);
assigned = true;
}
......@@ -1679,7 +1664,9 @@ void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
// Handle fixed input operands of second instruction.
for (size_t i = 0; i < second->InputCount(); i++) {
auto input = second->InputAt(i);
if (input->IsImmediate()) continue; // Ignore immediates.
if (input->IsImmediate() || input->IsExplicit()) {
continue; // Ignore immediates and explicitly reserved registers.
}
auto cur_input = UnallocatedOperand::cast(input);
if (cur_input->HasFixedPolicy()) {
int input_vreg = cur_input->virtual_register();
......@@ -1859,10 +1846,10 @@ TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
ConstantOperand::cast(operand)->virtual_register());
} else if (operand->IsRegister()) {
return FixedLiveRangeFor(
RegisterOperand::cast(operand)->GetRegister().code());
LocationOperand::cast(operand)->GetRegister().code());
} else if (operand->IsDoubleRegister()) {
return FixedDoubleLiveRangeFor(
DoubleRegisterOperand::cast(operand)->GetDoubleRegister().code());
LocationOperand::cast(operand)->GetDoubleRegister().code());
} else {
return nullptr;
}
......@@ -1976,7 +1963,9 @@ void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
for (size_t i = 0; i < instr->InputCount(); i++) {
auto input = instr->InputAt(i);
if (input->IsImmediate()) continue; // Ignore immediates.
if (input->IsImmediate() || input->IsExplicit()) {
continue; // Ignore immediates and explicitly reserved registers.
}
LifetimePosition use_pos;
if (input->IsUnallocated() &&
UnallocatedOperand::cast(input)->IsUsedAtStart()) {
......
......@@ -32,13 +32,17 @@ class InterpreterState {
private:
struct Key {
bool is_constant;
AllocatedOperand::AllocatedKind kind;
bool is_float;
LocationOperand::LocationKind kind;
int index;
bool operator<(const Key& other) const {
if (this->is_constant != other.is_constant) {
return this->is_constant;
}
if (this->is_float != other.is_float) {
return this->is_float;
}
if (this->kind != other.kind) {
return this->kind < other.kind;
}
......@@ -70,22 +74,24 @@ class InterpreterState {
static Key KeyFor(const InstructionOperand& op) {
bool is_constant = op.IsConstant();
AllocatedOperand::AllocatedKind kind;
bool is_float = false;
LocationOperand::LocationKind kind;
int index;
if (!is_constant) {
if (op.IsRegister()) {
index = AllocatedOperand::cast(op).GetRegister().code();
index = LocationOperand::cast(op).GetRegister().code();
} else if (op.IsDoubleRegister()) {
index = AllocatedOperand::cast(op).GetDoubleRegister().code();
index = LocationOperand::cast(op).GetDoubleRegister().code();
} else {
index = AllocatedOperand::cast(op).index();
index = LocationOperand::cast(op).index();
}
kind = AllocatedOperand::cast(op).allocated_kind();
is_float = IsFloatingPoint(LocationOperand::cast(op).machine_type());
kind = LocationOperand::cast(op).location_kind();
} else {
index = ConstantOperand::cast(op).virtual_register();
kind = AllocatedOperand::REGISTER;
kind = LocationOperand::REGISTER;
}
Key key = {is_constant, kind, index};
Key key = {is_constant, is_float, kind, index};
return key;
}
......@@ -192,18 +198,26 @@ class ParallelMoveCreator : public HandleAndZoneScope {
}
InstructionOperand CreateRandomOperand(bool is_source) {
int index = rng_->NextInt(6);
int index = rng_->NextInt(7);
// destination can't be Constant.
switch (rng_->NextInt(is_source ? 5 : 4)) {
switch (rng_->NextInt(is_source ? 7 : 6)) {
case 0:
return StackSlotOperand(RandomType(), index);
return AllocatedOperand(LocationOperand::STACK_SLOT, RandomType(),
index);
case 1:
return DoubleStackSlotOperand(RandomDoubleType(), index);
return AllocatedOperand(LocationOperand::STACK_SLOT, RandomDoubleType(),
index);
case 2:
return RegisterOperand(RandomType(), index);
return AllocatedOperand(LocationOperand::REGISTER, RandomType(), index);
case 3:
return DoubleRegisterOperand(RandomDoubleType(), index);
return AllocatedOperand(LocationOperand::REGISTER, RandomDoubleType(),
index);
case 4:
return ExplicitOperand(LocationOperand::REGISTER, RandomType(), 1);
case 5:
return ExplicitOperand(LocationOperand::STACK_SLOT, RandomType(),
index);
case 6:
return ConstantOperand(index);
}
UNREACHABLE();
......
......@@ -59,14 +59,16 @@ class TestCode : public HandleAndZoneScope {
Start();
sequence_.AddInstruction(Instruction::New(main_zone(), kArchNop));
int index = static_cast<int>(sequence_.instructions().size()) - 1;
AddGapMove(index, RegisterOperand(kRepWord32, 13),
RegisterOperand(kRepWord32, 13));
AddGapMove(index,
AllocatedOperand(LocationOperand::REGISTER, kRepWord32, 13),
AllocatedOperand(LocationOperand::REGISTER, kRepWord32, 13));
}
void NonRedundantMoves() {
Start();
sequence_.AddInstruction(Instruction::New(main_zone(), kArchNop));
int index = static_cast<int>(sequence_.instructions().size()) - 1;
AddGapMove(index, ConstantOperand(11), RegisterOperand(kRepWord32, 11));
AddGapMove(index, ConstantOperand(11),
AllocatedOperand(LocationOperand::REGISTER, kRepWord32, 11));
}
void Other() {
Start();
......
......@@ -36,6 +36,7 @@ class InstructionSequenceTest : public TestWithIsolateAndZone {
kFixedRegister,
kSlot,
kFixedSlot,
kExplicit,
kImmediate,
kNone,
kConstant,
......@@ -57,6 +58,11 @@ class InstructionSequenceTest : public TestWithIsolateAndZone {
static TestOperand Same() { return TestOperand(kSameAsFirst, VReg()); }
static TestOperand ExplicitReg(int index) {
TestOperandType type = kExplicit;
return TestOperand(type, VReg(), index);
}
static TestOperand Reg(VReg vreg, int index = kNoValue) {
TestOperandType type = kRegister;
if (index != kNoValue) type = kFixedRegister;
......
......@@ -67,10 +67,16 @@ class MoveOptimizerTest : public InstructionSequenceTest {
case kConstant:
return ConstantOperand(op.value_);
case kFixedSlot:
return StackSlotOperand(kRepWord32, op.value_);
return AllocatedOperand(LocationOperand::STACK_SLOT, kRepWord32,
op.value_);
case kFixedRegister:
CHECK(0 <= op.value_ && op.value_ < num_general_registers());
return RegisterOperand(kRepWord32, op.value_);
return AllocatedOperand(LocationOperand::REGISTER, kRepWord32,
op.value_);
case kExplicit:
CHECK(0 <= op.value_ && op.value_ < num_general_registers());
return ExplicitOperand(LocationOperand::REGISTER, kRepWord32,
op.value_);
default:
break;
}
......@@ -97,6 +103,23 @@ TEST_F(MoveOptimizerTest, RemovesRedundant) {
}
TEST_F(MoveOptimizerTest, RemovesRedundantExplicit) {
StartBlock();
auto first_instr = EmitNop();
AddMove(first_instr, Reg(0), ExplicitReg(1));
auto last_instr = EmitNop();
AddMove(last_instr, Reg(1), Reg(0));
EndBlock(Last());
Optimize();
CHECK_EQ(0, NonRedundantSize(first_instr->parallel_moves()[0]));
auto move = last_instr->parallel_moves()[0];
CHECK_EQ(1, NonRedundantSize(move));
CHECK(Contains(move, Reg(0), ExplicitReg(1)));
}
TEST_F(MoveOptimizerTest, SplitsConstants) {
StartBlock();
EndBlock(Last());
......