Commit 0ee4b473 authored by mtrofin's avatar mtrofin Committed by Commit bot

[turbofan] Separate LiveRange and TopLevelLiveRange concepts

A TopLevelLiveRange is the live range of a virtual register. Through
register allocation, it may end up being split in a succession of child
live ranges, where data flow is handled through moves from
predecessor to successor child.

Today, the concepts of "top level" and "child" live ranges are conflated
under the LiveRange class. However, a good few APIs pertain solely
to TopLevelLiveRanges. This was communicated through comments or
DCHECKs - but this makes for poor code comprehensibility and maintainability.

For example, the worklist of the register allocator (live_ranges()) needs
to only contain TopLevelLiveRanges; spill range concerns are associated
only with the top range; phi-ness; certain phases in the allocation pipeline;
APIs on LiveRange used for initial construction - before splitting;
splintering - these are all responsibilities associated to TopLevelLiveRanges,
and not child live ranges.

This change separates the concepts.

An effect of this change is that child live range allocation need not involve
RegisterAllocationData. That's "a good thing" (lower coupling), but it has
the side-effect of not having a good way to construct unique identifiers for
child live ranges, relative to a given InstructionSequence.

LiveRange ids are used primarily for tracing, output, and debugging.

I propose a 2-component identifier: a virtual register (vreg) number,
uniquely identifying TopLevelLiveRanges; and a relative identifier, which
uniquely identifies children of a given TopLevelLiveRange. "0" is reserved
for the TopLevel range. The relative identifier does not necessarily
indicate order in the child chain, which is no worse than the current state
of affairs.

I believe this change should make it easier to understand a trace output
(because the virtual register number is readily available). I plan to formalize
with a small structure the notion of live range id, and consolidate tracing
around that, as part of a separate CL. (there are seemingly disparate ways
to trace - printf or stream-based APIs - so this seems like an opportune
change to consolidate that)

Review URL: https://codereview.chromium.org/1311983002

Cr-Commit-Position: refs/heads/master@{#30370}
parent 268420af
...@@ -420,7 +420,9 @@ class GraphC1Visualizer { ...@@ -420,7 +420,9 @@ class GraphC1Visualizer {
void PrintInputs(InputIterator* i, int count, const char* prefix); void PrintInputs(InputIterator* i, int count, const char* prefix);
void PrintType(Node* node); void PrintType(Node* node);
void PrintLiveRange(LiveRange* range, const char* type); void PrintLiveRange(LiveRange* range, const char* type, int vreg);
void PrintLiveRangeChain(TopLevelLiveRange* range, const char* type);
class Tag final BASE_EMBEDDED { class Tag final BASE_EMBEDDED {
public: public:
Tag(GraphC1Visualizer* visualizer, const char* name) { Tag(GraphC1Visualizer* visualizer, const char* name) {
...@@ -694,23 +696,33 @@ void GraphC1Visualizer::PrintLiveRanges(const char* phase, ...@@ -694,23 +696,33 @@ void GraphC1Visualizer::PrintLiveRanges(const char* phase,
PrintStringProperty("name", phase); PrintStringProperty("name", phase);
for (auto range : data->fixed_double_live_ranges()) { for (auto range : data->fixed_double_live_ranges()) {
PrintLiveRange(range, "fixed"); PrintLiveRangeChain(range, "fixed");
} }
for (auto range : data->fixed_live_ranges()) { for (auto range : data->fixed_live_ranges()) {
PrintLiveRange(range, "fixed"); PrintLiveRangeChain(range, "fixed");
} }
for (auto range : data->live_ranges()) { for (auto range : data->live_ranges()) {
PrintLiveRange(range, "object"); PrintLiveRangeChain(range, "object");
}
}
void GraphC1Visualizer::PrintLiveRangeChain(TopLevelLiveRange* range,
const char* type) {
int vreg = range->vreg();
for (LiveRange* child = range; child != nullptr; child = child->next()) {
PrintLiveRange(child, type, vreg);
} }
} }
void GraphC1Visualizer::PrintLiveRange(LiveRange* range, const char* type) { void GraphC1Visualizer::PrintLiveRange(LiveRange* range, const char* type,
int vreg) {
if (range != NULL && !range->IsEmpty()) { if (range != NULL && !range->IsEmpty()) {
PrintIndent(); PrintIndent();
os_ << range->id() << " " << type; os_ << vreg << ":" << range->relative_id() << " " << type;
if (range->HasRegisterAssigned()) { if (range->HasRegisterAssigned()) {
AllocatedOperand op = AllocatedOperand::cast(range->GetAssignedOperand()); AllocatedOperand op = AllocatedOperand::cast(range->GetAssignedOperand());
int assigned_reg = op.index(); int assigned_reg = op.index();
...@@ -739,13 +751,8 @@ void GraphC1Visualizer::PrintLiveRange(LiveRange* range, const char* type) { ...@@ -739,13 +751,8 @@ void GraphC1Visualizer::PrintLiveRange(LiveRange* range, const char* type) {
} }
} }
} }
int parent_index = -1;
if (range->IsChild()) { os_ << " " << vreg;
parent_index = range->parent()->id();
} else {
parent_index = range->id();
}
os_ << " " << parent_index;
for (auto interval = range->first_interval(); interval != nullptr; for (auto interval = range->first_interval(); interval != nullptr;
interval = interval->next()) { interval = interval->next()) {
os_ << " [" << interval->start().value() << ", " os_ << " [" << interval->start().value() << ", "
......
...@@ -22,11 +22,11 @@ const float GreedyAllocator::kAllocatedRangeMultiplier = 10.0; ...@@ -22,11 +22,11 @@ const float GreedyAllocator::kAllocatedRangeMultiplier = 10.0;
namespace { namespace {
void UpdateOperands(LiveRange* range, RegisterAllocationData* data) { void UpdateOperands(TopLevelLiveRange* range, RegisterAllocationData* data) {
int reg_id = range->assigned_register(); int reg_id = range->assigned_register();
range->SetUseHints(reg_id); range->SetUseHints(reg_id);
if (range->is_phi()) { if (range->is_phi()) {
data->GetPhiMapValueFor(range->id())->set_assigned_register(reg_id); data->GetPhiMapValueFor(range)->set_assigned_register(reg_id);
} }
} }
...@@ -38,8 +38,7 @@ LiveRange* Split(LiveRange* range, RegisterAllocationData* data, ...@@ -38,8 +38,7 @@ LiveRange* Split(LiveRange* range, RegisterAllocationData* data,
(data->code() (data->code()
->GetInstructionBlock(pos.ToInstructionIndex()) ->GetInstructionBlock(pos.ToInstructionIndex())
->last_instruction_index() != pos.ToInstructionIndex())); ->last_instruction_index() != pos.ToInstructionIndex()));
LiveRange* result = data->NewChildRangeFor(range); LiveRange* result = range->SplitAt(pos, data->allocation_zone());
range->SplitAt(pos, result, data->allocation_zone());
return result; return result;
} }
...@@ -117,7 +116,8 @@ AllocationCandidate AllocationScheduler::GetNext() { ...@@ -117,7 +116,8 @@ AllocationCandidate AllocationScheduler::GetNext() {
void AllocationScheduler::Schedule(LiveRange* range) { void AllocationScheduler::Schedule(LiveRange* range) {
TRACE("Scheduling live range %d.\n", range->id()); TRACE("Scheduling live range %d:%d.\n", range->TopLevel()->vreg(),
range->relative_id());
queue_.push(AllocationCandidate(range)); queue_.push(AllocationCandidate(range));
} }
...@@ -130,14 +130,15 @@ GreedyAllocator::GreedyAllocator(RegisterAllocationData* data, ...@@ -130,14 +130,15 @@ GreedyAllocator::GreedyAllocator(RegisterAllocationData* data,
void GreedyAllocator::AssignRangeToRegister(int reg_id, LiveRange* range) { void GreedyAllocator::AssignRangeToRegister(int reg_id, LiveRange* range) {
TRACE("Assigning register %s to live range %d\n", RegisterName(reg_id), TRACE("Assigning register %s to live range %d:%d\n", RegisterName(reg_id),
range->id()); range->TopLevel()->vreg(), range->relative_id());
DCHECK(!range->HasRegisterAssigned()); DCHECK(!range->HasRegisterAssigned());
AllocateRegisterToRange(reg_id, range); AllocateRegisterToRange(reg_id, range);
TRACE("Assigning %s to range %d\n", RegisterName(reg_id), range->id()); TRACE("Assigning %s to range %d%d.\n", RegisterName(reg_id),
range->TopLevel()->vreg(), range->relative_id());
range->set_assigned_register(reg_id); range->set_assigned_register(reg_id);
} }
...@@ -151,7 +152,7 @@ void GreedyAllocator::PreallocateFixedRanges() { ...@@ -151,7 +152,7 @@ void GreedyAllocator::PreallocateFixedRanges() {
for (LiveRange* fixed_range : GetFixedRegisters()) { for (LiveRange* fixed_range : GetFixedRegisters()) {
if (fixed_range != nullptr) { if (fixed_range != nullptr) {
DCHECK_EQ(mode(), fixed_range->kind()); DCHECK_EQ(mode(), fixed_range->kind());
DCHECK(fixed_range->IsFixed()); DCHECK(fixed_range->TopLevel()->IsFixed());
int reg_nr = fixed_range->assigned_register(); int reg_nr = fixed_range->assigned_register();
EnsureValidRangeWeight(fixed_range); EnsureValidRangeWeight(fixed_range);
...@@ -180,7 +181,8 @@ void GreedyAllocator::TryAllocateCandidate( ...@@ -180,7 +181,8 @@ void GreedyAllocator::TryAllocateCandidate(
void GreedyAllocator::TryAllocateLiveRange(LiveRange* range) { void GreedyAllocator::TryAllocateLiveRange(LiveRange* range) {
// TODO(mtrofin): once we introduce groups, we'll want to first try and // TODO(mtrofin): once we introduce groups, we'll want to first try and
// allocate at the preferred register. // allocate at the preferred register.
TRACE("Attempting to allocate live range %d\n", range->id()); TRACE("Attempting to allocate live range %d:%d.\n", range->TopLevel()->vreg(),
range->relative_id());
int free_reg = -1; int free_reg = -1;
int evictable_reg = -1; int evictable_reg = -1;
EnsureValidRangeWeight(range); EnsureValidRangeWeight(range);
...@@ -206,8 +208,9 @@ void GreedyAllocator::TryAllocateLiveRange(LiveRange* range) { ...@@ -206,8 +208,9 @@ void GreedyAllocator::TryAllocateLiveRange(LiveRange* range) {
// We have a free register, so we use it. // We have a free register, so we use it.
if (free_reg >= 0) { if (free_reg >= 0) {
TRACE("Found free register %s for live range %d\n", RegisterName(free_reg), TRACE("Found free register %s for live range %d:%d.\n",
range->id()); RegisterName(free_reg), range->TopLevel()->vreg(),
range->relative_id());
AssignRangeToRegister(free_reg, range); AssignRangeToRegister(free_reg, range);
return; return;
} }
...@@ -215,8 +218,9 @@ void GreedyAllocator::TryAllocateLiveRange(LiveRange* range) { ...@@ -215,8 +218,9 @@ void GreedyAllocator::TryAllocateLiveRange(LiveRange* range) {
// We found a register to perform evictions, so we evict and allocate our // We found a register to perform evictions, so we evict and allocate our
// candidate. // candidate.
if (evictable_reg >= 0) { if (evictable_reg >= 0) {
TRACE("Found evictable register %s for live range %d\n", TRACE("Found evictable register %s for live range %d:%d.\n",
RegisterName(free_reg), range->id()); RegisterName(free_reg), range->TopLevel()->vreg(),
range->relative_id());
EvictAndRescheduleConflicts(evictable_reg, range); EvictAndRescheduleConflicts(evictable_reg, range);
AssignRangeToRegister(evictable_reg, range); AssignRangeToRegister(evictable_reg, range);
return; return;
...@@ -233,11 +237,12 @@ void GreedyAllocator::EvictAndRescheduleConflicts(unsigned reg_id, ...@@ -233,11 +237,12 @@ void GreedyAllocator::EvictAndRescheduleConflicts(unsigned reg_id,
for (LiveRange* conflict = conflicts.Current(); conflict != nullptr; for (LiveRange* conflict = conflicts.Current(); conflict != nullptr;
conflict = conflicts.RemoveCurrentAndGetNext()) { conflict = conflicts.RemoveCurrentAndGetNext()) {
DCHECK(conflict->HasRegisterAssigned()); DCHECK(conflict->HasRegisterAssigned());
CHECK(!conflict->IsFixed()); CHECK(!conflict->TopLevel()->IsFixed());
conflict->UnsetAssignedRegister(); conflict->UnsetAssignedRegister();
UpdateWeightAtEviction(conflict); UpdateWeightAtEviction(conflict);
scheduler().Schedule(conflict); scheduler().Schedule(conflict);
TRACE("Evicted range %d.\n", conflict->id()); TRACE("Evicted range %d%d.\n", conflict->TopLevel()->vreg(),
conflict->relative_id());
} }
} }
...@@ -250,7 +255,8 @@ void GreedyAllocator::SplitAndSpillRangesDefinedByMemoryOperand() { ...@@ -250,7 +255,8 @@ void GreedyAllocator::SplitAndSpillRangesDefinedByMemoryOperand() {
if (range->HasNoSpillType()) continue; if (range->HasNoSpillType()) continue;
LifetimePosition start = range->Start(); LifetimePosition start = range->Start();
TRACE("Live range %d is defined by a spill operand.\n", range->id()); TRACE("Live range %d:%d is defined by a spill operand.\n",
range->TopLevel()->vreg(), range->relative_id());
auto next_pos = start; auto next_pos = start;
if (next_pos.IsGapPosition()) { if (next_pos.IsGapPosition()) {
next_pos = next_pos.NextStart(); next_pos = next_pos.NextStart();
...@@ -335,7 +341,7 @@ void GreedyAllocator::EnsureValidRangeWeight(LiveRange* range) { ...@@ -335,7 +341,7 @@ void GreedyAllocator::EnsureValidRangeWeight(LiveRange* range) {
// unallocated. // unallocated.
if (range->weight() != LiveRange::kInvalidWeight) return; if (range->weight() != LiveRange::kInvalidWeight) return;
if (range->IsFixed()) { if (range->TopLevel()->IsFixed()) {
range->set_weight(LiveRange::kMaxWeight); range->set_weight(LiveRange::kMaxWeight);
return; return;
} }
......
...@@ -66,9 +66,8 @@ bool IsIntervalAlreadyExcluded(const LiveRange *range, LifetimePosition start, ...@@ -66,9 +66,8 @@ bool IsIntervalAlreadyExcluded(const LiveRange *range, LifetimePosition start,
} }
void CreateSplinter(LiveRange *range, RegisterAllocationData *data, void CreateSplinter(TopLevelLiveRange *range, RegisterAllocationData *data,
LifetimePosition first_cut, LifetimePosition last_cut) { LifetimePosition first_cut, LifetimePosition last_cut) {
DCHECK(!range->IsChild());
DCHECK(!range->IsSplinter()); DCHECK(!range->IsSplinter());
// We can ignore ranges that live solely in deferred blocks. // We can ignore ranges that live solely in deferred blocks.
// If a range ends right at the end of a deferred block, it is marked by // If a range ends right at the end of a deferred block, it is marked by
...@@ -94,7 +93,10 @@ void CreateSplinter(LiveRange *range, RegisterAllocationData *data, ...@@ -94,7 +93,10 @@ void CreateSplinter(LiveRange *range, RegisterAllocationData *data,
if (range->MayRequireSpillRange()) { if (range->MayRequireSpillRange()) {
data->CreateSpillRangeForLiveRange(range); data->CreateSpillRangeForLiveRange(range);
} }
LiveRange *result = data->NewChildRangeFor(range); TopLevelLiveRange *result = data->NextLiveRange(range->machine_type());
DCHECK_NULL(data->live_ranges()[result->vreg()]);
data->live_ranges()[result->vreg()] = result;
Zone *zone = data->allocation_zone(); Zone *zone = data->allocation_zone();
range->Splinter(start, end, result, zone); range->Splinter(start, end, result, zone);
} }
...@@ -138,7 +140,7 @@ void SplinterRangesInDeferredBlocks(RegisterAllocationData *data) { ...@@ -138,7 +140,7 @@ void SplinterRangesInDeferredBlocks(RegisterAllocationData *data) {
int range_id = iterator.Current(); int range_id = iterator.Current();
iterator.Advance(); iterator.Advance();
LiveRange *range = data->live_ranges()[range_id]; TopLevelLiveRange *range = data->live_ranges()[range_id];
CreateSplinter(range, data, first_cut, last_cut); CreateSplinter(range, data, first_cut, last_cut);
} }
} }
...@@ -155,12 +157,11 @@ void LiveRangeSeparator::Splinter() { ...@@ -155,12 +157,11 @@ void LiveRangeSeparator::Splinter() {
void LiveRangeMerger::Merge() { void LiveRangeMerger::Merge() {
int live_range_count = static_cast<int>(data()->live_ranges().size()); int live_range_count = static_cast<int>(data()->live_ranges().size());
for (int i = 0; i < live_range_count; ++i) { for (int i = 0; i < live_range_count; ++i) {
LiveRange *range = data()->live_ranges()[i]; TopLevelLiveRange *range = data()->live_ranges()[i];
if (range == nullptr || range->IsEmpty() || range->IsChild() || if (range == nullptr || range->IsEmpty() || !range->IsSplinter()) {
!range->IsSplinter()) {
continue; continue;
} }
LiveRange *splinter_parent = range->splintered_from(); TopLevelLiveRange *splinter_parent = range->splintered_from();
splinter_parent->Merge(range, data()); splinter_parent->Merge(range, data());
} }
......
...@@ -230,41 +230,26 @@ std::ostream& operator<<(std::ostream& os, const LifetimePosition pos) { ...@@ -230,41 +230,26 @@ std::ostream& operator<<(std::ostream& os, const LifetimePosition pos) {
} }
// Zone-allocated singly-linked list node recording one pending "spill at
// definition" request: the gap at which the spill move must be inserted and
// the operand holding the value to be spilled.
struct LiveRange::SpillAtDefinitionList : ZoneObject {
  SpillAtDefinitionList(int gap_index, InstructionOperand* operand,
                        SpillAtDefinitionList* next)
      : gap_index(gap_index), operand(operand), next(next) {}
  const int gap_index;                // Gap (instruction index) to spill at.
  InstructionOperand* const operand;  // Source operand of the spill move.
  SpillAtDefinitionList* const next;  // Next recorded request, or nullptr.
};
const float LiveRange::kInvalidWeight = -1; const float LiveRange::kInvalidWeight = -1;
const float LiveRange::kMaxWeight = std::numeric_limits<float>::max(); const float LiveRange::kMaxWeight = std::numeric_limits<float>::max();
LiveRange::LiveRange(int id, MachineType machine_type) LiveRange::LiveRange(int relative_id, MachineType machine_type,
: id_(id), TopLevelLiveRange* top_level)
spill_start_index_(kMaxInt), : relative_id_(relative_id),
bits_(0), bits_(0),
last_interval_(nullptr), last_interval_(nullptr),
first_interval_(nullptr), first_interval_(nullptr),
first_pos_(nullptr), first_pos_(nullptr),
parent_(nullptr), top_level_(top_level),
next_(nullptr), next_(nullptr),
splintered_from_(nullptr),
spill_operand_(nullptr),
spills_at_definition_(nullptr),
current_interval_(nullptr), current_interval_(nullptr),
last_processed_use_(nullptr), last_processed_use_(nullptr),
current_hint_position_(nullptr), current_hint_position_(nullptr),
size_(kInvalidSize), size_(kInvalidSize),
weight_(kInvalidWeight), weight_(kInvalidWeight) {
spilled_in_deferred_block_(false) {
DCHECK(AllocatedOperand::IsSupportedMachineType(machine_type)); DCHECK(AllocatedOperand::IsSupportedMachineType(machine_type));
bits_ = SpillTypeField::encode(SpillType::kNoSpillType) | bits_ = AssignedRegisterField::encode(kUnassignedRegister) |
AssignedRegisterField::encode(kUnassignedRegister) |
MachineTypeField::encode(machine_type); MachineTypeField::encode(machine_type);
} }
...@@ -316,121 +301,6 @@ RegisterKind LiveRange::kind() const { ...@@ -316,121 +301,6 @@ RegisterKind LiveRange::kind() const {
} }
// Records a request to spill this range at the gap with index |gap_index|,
// using |operand| as the source of the spill move. Requests are prepended to
// a zone-allocated list and materialized later by CommitSpillsAtDefinition().
void LiveRange::SpillAtDefinition(Zone* zone, int gap_index,
                                  InstructionOperand* operand) {
  // Only legal before a spill type has been chosen for this range.
  DCHECK(HasNoSpillType());
  spills_at_definition_ = new (zone)
      SpillAtDefinitionList(gap_index, operand, spills_at_definition_);
}
// Attempts to restrict spilling of this (top-level) range to deferred blocks
// only. Returns true - and inserts the necessary spill moves - when every
// child that is spilled or has a slot-requiring use starts in a deferred
// block. Returns false (so the caller falls back to spilling at definition)
// when the feature flag is off, the range is empty or has no spill type, the
// spill operand is a constant/immediate, no child actually needs a spill, or
// a slot-requiring/spilled child lives in a non-deferred block.
bool LiveRange::TryCommitSpillInDeferredBlock(
    InstructionSequence* code, const InstructionOperand& spill_operand) {
  DCHECK(!IsChild());
  if (!FLAG_turbo_preprocess_ranges || IsEmpty() || HasNoSpillType() ||
      spill_operand.IsConstant() || spill_operand.IsImmediate()) {
    return false;
  }
  // First pass: verify every spilled/slot-using child starts in a deferred
  // block, counting how many such children there are.
  int count = 0;
  for (const LiveRange* child = this; child != nullptr; child = child->next()) {
    int first_instr = child->Start().ToInstructionIndex();
    // If the range starts at instruction end, the first instruction index is
    // the next one.
    if (!child->Start().IsGapPosition() && !child->Start().IsStart()) {
      ++first_instr;
    }
    // We only look at where the range starts. It doesn't matter where it ends:
    // if it ends past this block, then either there is a phi there already,
    // or ResolveControlFlow will adapt the last instruction gap of this block
    // as if there were a phi. In either case, data flow will be correct.
    const InstructionBlock* block = code->GetInstructionBlock(first_instr);
    // If we have slot uses in a subrange, bail out, because we need the value
    // on the stack before that use.
    bool has_slot_use = child->NextSlotPosition(child->Start()) != nullptr;
    if (!block->IsDeferred()) {
      if (child->spilled() || has_slot_use) {
        TRACE(
            "Live Range %d must be spilled at definition: found a "
            "slot-requiring non-deferred child range %d.\n",
            TopLevel()->id(), child->id());
        return false;
      }
    } else {
      if (child->spilled() || has_slot_use) ++count;
    }
  }
  // Nothing needs to be spilled at all - no point in deferring.
  if (count == 0) return false;

  spill_start_index_ = -1;
  spilled_in_deferred_block_ = true;

  TRACE("Live Range %d will be spilled only in deferred blocks.\n", id());
  // If we have ranges that aren't spilled but require the operand on the stack,
  // make sure we insert the spill.
  for (const LiveRange* child = this; child != nullptr; child = child->next()) {
    if (!child->spilled() &&
        child->NextSlotPosition(child->Start()) != nullptr) {
      auto instr = code->InstructionAt(child->Start().ToInstructionIndex());
      // Insert spill at the end to let live range connections happen at START.
      auto move =
          instr->GetOrCreateParallelMove(Instruction::END, code->zone());
      InstructionOperand assigned = child->GetAssignedOperand();
      if (TopLevel()->has_slot_use()) {
        // Avoid inserting a duplicate of an already-present spill move.
        bool found = false;
        for (auto move_op : *move) {
          if (move_op->IsEliminated()) continue;
          if (move_op->source().Equals(assigned) &&
              move_op->destination().Equals(spill_operand)) {
            found = true;
            break;
          }
        }
        if (found) continue;
      }
      move->AddMove(assigned, spill_operand);
    }
  }
  return true;
}
// Materializes the spill moves recorded via SpillAtDefinition(): for each
// recorded gap, inserts a move from the recorded operand to |op| into the
// START parallel move of that gap's instruction. When |might_be_duplicated|
// is set, an already-present equivalent move (e.g. a constraint move from a
// fixed output register to a slot) suppresses the insertion.
void LiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence,
                                         const InstructionOperand& op,
                                         bool might_be_duplicated) {
  // A constant destination implies nothing was recorded to spill.
  DCHECK_IMPLIES(op.IsConstant(), spills_at_definition_ == nullptr);
  DCHECK(!IsChild());
  auto zone = sequence->zone();
  for (auto to_spill = spills_at_definition_; to_spill != nullptr;
       to_spill = to_spill->next) {
    auto instr = sequence->InstructionAt(to_spill->gap_index);
    auto move = instr->GetOrCreateParallelMove(Instruction::START, zone);
    // Skip insertion if it's possible that the move exists already as a
    // constraint move from a fixed output register to a slot.
    if (might_be_duplicated) {
      bool found = false;
      for (auto move_op : *move) {
        if (move_op->IsEliminated()) continue;
        if (move_op->source().Equals(*to_spill->operand) &&
            move_op->destination().Equals(op)) {
          found = true;
          break;
        }
      }
      if (found) continue;
    }
    move->AddMove(*to_spill->operand, op);
  }
}
UsePosition* LiveRange::FirstHintPosition(int* register_index) const { UsePosition* LiveRange::FirstHintPosition(int* register_index) const {
for (auto pos = first_pos_; pos != nullptr; pos = pos->next()) { for (auto pos = first_pos_; pos != nullptr; pos = pos->next()) {
if (pos->HintRegister(register_index)) return pos; if (pos->HintRegister(register_index)) return pos;
...@@ -439,22 +309,6 @@ UsePosition* LiveRange::FirstHintPosition(int* register_index) const { ...@@ -439,22 +309,6 @@ UsePosition* LiveRange::FirstHintPosition(int* register_index) const {
} }
// Assigns a concrete spill operand to this range and switches its spill type
// to kSpillOperand. Only valid while no spill type has been chosen yet, and
// the operand must be a concrete location (not unallocated or immediate).
void LiveRange::SetSpillOperand(InstructionOperand* operand) {
  DCHECK(HasNoSpillType());
  DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());
  set_spill_type(SpillType::kSpillOperand);
  spill_operand_ = operand;
}
// Associates this (top-level) range with a shared spill range. Only valid
// when no explicit spill operand has been set; spill operand and spill range
// are mutually exclusive spill decisions.
void LiveRange::SetSpillRange(SpillRange* spill_range) {
  DCHECK(!HasSpillOperand());
  DCHECK(spill_range);
  DCHECK(!IsChild());
  spill_range_ = spill_range;
}
UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const { UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const {
UsePosition* use_pos = last_processed_use_; UsePosition* use_pos = last_processed_use_;
if (use_pos == nullptr || use_pos->pos() > start) { if (use_pos == nullptr || use_pos->pos() > start) {
...@@ -518,6 +372,9 @@ bool LiveRange::CanBeSpilled(LifetimePosition pos) const { ...@@ -518,6 +372,9 @@ bool LiveRange::CanBeSpilled(LifetimePosition pos) const {
} }
bool LiveRange::IsTopLevel() const { return top_level_ == this; }
InstructionOperand LiveRange::GetAssignedOperand() const { InstructionOperand LiveRange::GetAssignedOperand() const {
if (HasRegisterAssigned()) { if (HasRegisterAssigned()) {
DCHECK(!spilled()); DCHECK(!spilled());
...@@ -539,20 +396,6 @@ InstructionOperand LiveRange::GetAssignedOperand() const { ...@@ -539,20 +396,6 @@ InstructionOperand LiveRange::GetAssignedOperand() const {
} }
// Builds the stack-slot operand for this range's assigned spill slot,
// choosing the slot flavor that matches the range's register kind.
AllocatedOperand LiveRange::GetSpillRangeOperand() const {
  const int slot_index = GetSpillRange()->assigned_slot();
  switch (kind()) {
    case DOUBLE_REGISTERS:
      return DoubleStackSlotOperand(machine_type(), slot_index);
    case GENERAL_REGISTERS:
      return StackSlotOperand(machine_type(), slot_index);
  }
  UNREACHABLE();
  return StackSlotOperand(kMachNone, 0);
}
UseInterval* LiveRange::FirstSearchIntervalForPosition( UseInterval* LiveRange::FirstSearchIntervalForPosition(
LifetimePosition position) const { LifetimePosition position) const {
if (current_interval_ == nullptr) return first_interval_; if (current_interval_ == nullptr) return first_interval_;
...@@ -576,7 +419,20 @@ void LiveRange::AdvanceLastProcessedMarker( ...@@ -576,7 +419,20 @@ void LiveRange::AdvanceLastProcessedMarker(
} }
void LiveRange::SplitAt(LifetimePosition position, LiveRange* result, LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) {
int new_id = TopLevel()->GetNextChildId();
LiveRange* child = new (zone) LiveRange(new_id, machine_type(), TopLevel());
DetachAt(position, child, zone);
child->top_level_ = TopLevel();
child->next_ = next_;
next_ = child;
return child;
}
void LiveRange::DetachAt(LifetimePosition position, LiveRange* result,
Zone* zone) { Zone* zone) {
DCHECK(Start() < position); DCHECK(Start() < position);
DCHECK(result->IsEmpty()); DCHECK(result->IsEmpty());
...@@ -607,70 +463,357 @@ void LiveRange::SplitAt(LifetimePosition position, LiveRange* result, ...@@ -607,70 +463,357 @@ void LiveRange::SplitAt(LifetimePosition position, LiveRange* result,
current->set_next(nullptr); current->set_next(nullptr);
break; break;
} }
current = next; current = next;
}
DCHECK(nullptr != after);
// Partition original use intervals to the two live ranges.
auto before = current;
result->last_interval_ =
(last_interval_ == before)
? after // Only interval in the range after split.
: last_interval_; // Last interval of the original range.
result->first_interval_ = after;
last_interval_ = before;
// Find the last use position before the split and the first use
// position after it.
auto use_after = first_pos_;
UsePosition* use_before = nullptr;
if (split_at_start) {
// The split position coincides with the beginning of a use interval (the
// end of a lifetime hole). Use at this position should be attributed to
// the split child because split child owns use interval covering it.
while (use_after != nullptr && use_after->pos() < position) {
use_before = use_after;
use_after = use_after->next();
}
} else {
while (use_after != nullptr && use_after->pos() <= position) {
use_before = use_after;
use_after = use_after->next();
}
}
// Partition original use positions to the two live ranges.
if (use_before != nullptr) {
use_before->set_next(nullptr);
} else {
first_pos_ = nullptr;
}
result->first_pos_ = use_after;
// Discard cached iteration state. It might be pointing
// to the use that no longer belongs to this live range.
last_processed_use_ = nullptr;
current_interval_ = nullptr;
// Invalidate size and weight of this range. The child range has them
// invalid at construction.
size_ = kInvalidSize;
weight_ = kInvalidWeight;
#ifdef DEBUG
Verify();
result->Verify();
#endif
}
// Splices |other| (the top level of a merged-in chain) after this range,
// re-points every child in |other|'s chain at this range's top level, and
// lets the top level reconcile spill-range state after the merge.
void LiveRange::AppendAsChild(TopLevelLiveRange* other) {
  next_ = other;

  other->UpdateParentForAllChildren(TopLevel());
  TopLevel()->UpdateSpillRangePostMerge(other);
}
// Walks this range and all of its successors in the child chain, re-pointing
// each one's top-level link at |new_top_level|.
void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) {
  for (LiveRange* current = this; current != nullptr;
       current = current->next()) {
    current->top_level_ = new_top_level;
  }
}
void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
const InstructionOperand& spill_op) {
for (auto pos = first_pos(); pos != nullptr; pos = pos->next()) {
DCHECK(Start() <= pos->pos() && pos->pos() <= End());
if (!pos->HasOperand()) continue;
switch (pos->type()) {
case UsePositionType::kRequiresSlot:
DCHECK(spill_op.IsStackSlot() || spill_op.IsDoubleStackSlot());
InstructionOperand::ReplaceWith(pos->operand(), &spill_op);
break;
case UsePositionType::kRequiresRegister:
DCHECK(op.IsRegister() || op.IsDoubleRegister());
// Fall through.
case UsePositionType::kAny:
InstructionOperand::ReplaceWith(pos->operand(), &op);
break;
}
}
}
// This implements an ordering on live ranges so that they are ordered by their
// start positions. This is needed for the correctness of the register
// allocation algorithm. If two live ranges start at the same offset then there
// is a tie breaker based on where the value is first used. This part of the
// ordering is merely a heuristic.
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
LifetimePosition start = Start();
LifetimePosition other_start = other->Start();
if (start == other_start) {
UsePosition* pos = first_pos();
if (pos == nullptr) return false;
UsePosition* other_pos = other->first_pos();
if (other_pos == nullptr) return true;
return pos->pos() < other_pos->pos();
}
return start < other_start;
}
void LiveRange::SetUseHints(int register_index) {
for (auto pos = first_pos(); pos != nullptr; pos = pos->next()) {
if (!pos->HasOperand()) continue;
switch (pos->type()) {
case UsePositionType::kRequiresSlot:
break;
case UsePositionType::kRequiresRegister:
case UsePositionType::kAny:
pos->set_assigned_register(register_index);
break;
}
}
}
// Coarse coverage test: true when |position| lies in [Start(), End()) of a
// non-empty range. Lifetime holes between intervals are not considered; see
// Covers() for the exact, per-interval check.
bool LiveRange::CanCover(LifetimePosition position) const {
  return !IsEmpty() && Start() <= position && position < End();
}
// Exact coverage test: |position| must fall inside one of this range's use
// intervals, not merely between Start() and End().
bool LiveRange::Covers(LifetimePosition position) const {
  if (!CanCover(position)) return false;
  UseInterval* interval = FirstSearchIntervalForPosition(position);
  while (interval != nullptr) {
    // Intervals are kept sorted by start position.
    DCHECK(interval->next() == nullptr ||
           interval->next()->start() >= interval->start());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    if (interval->start() > position) return false;
    interval = interval->next();
  }
  return false;
}
// Returns the first position at which this range's use intervals overlap
// those of |other|, or an invalid position if the ranges are disjoint.
// Performs a merge-style scan of the two sorted interval lists.
LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const {
  auto b = other->first_interval();
  if (b == nullptr) return LifetimePosition::Invalid();
  auto advance_last_processed_up_to = b->start();
  // Start our scan at the interval nearest |other|'s first start, reusing the
  // cached iteration state where possible.
  auto a = FirstSearchIntervalForPosition(b->start());
  while (a != nullptr && b != nullptr) {
    // Once either scan has passed the other range's end, no overlap can
    // exist.
    if (a->start() > other->End()) break;
    if (b->start() > End()) break;
    auto cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    // Advance whichever interval starts earlier.
    if (a->start() < b->start()) {
      a = a->next();
      if (a == nullptr || a->start() > other->End()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}
unsigned LiveRange::GetSize() {
if (size_ == kInvalidSize) {
size_ = 0;
for (auto interval = first_interval(); interval != nullptr;
interval = interval->next()) {
size_ += (interval->end().value() - interval->start().value());
}
}
return static_cast<unsigned>(size_);
}
// Zone-allocated singly-linked list node recording one pending "spill at
// definition" request: the gap at which the spill move must be inserted and
// the operand holding the value to be spilled.
struct TopLevelLiveRange::SpillAtDefinitionList : ZoneObject {
  SpillAtDefinitionList(int gap_index, InstructionOperand* operand,
                        SpillAtDefinitionList* next)
      : gap_index(gap_index), operand(operand), next(next) {}
  const int gap_index;                // Gap (instruction index) to spill at.
  InstructionOperand* const operand;  // Source operand of the spill move.
  SpillAtDefinitionList* const next;  // Next recorded request, or nullptr.
};
// A TopLevelLiveRange is the live range of virtual register |vreg|. It acts
// as its own top level (relative id 0); children produced by splitting get
// successive relative ids from last_child_id_.
TopLevelLiveRange::TopLevelLiveRange(int vreg, MachineType machine_type)
    : LiveRange(0, machine_type, this),
      vreg_(vreg),
      last_child_id_(0),
      splintered_from_(nullptr),
      spill_operand_(nullptr),
      spills_at_definition_(nullptr),
      spilled_in_deferred_blocks_(false),
      spill_start_index_(kMaxInt) {
  // No spill decision has been made yet for a freshly created range.
  bits_ |= SpillTypeField::encode(SpillType::kNoSpillType);
}
// Records a request to spill this range at the gap with index |gap_index|,
// using |operand| as the source of the spill move. Requests are prepended to
// a zone-allocated list and materialized later when spills are committed.
void TopLevelLiveRange::SpillAtDefinition(Zone* zone, int gap_index,
                                          InstructionOperand* operand) {
  // Only legal before a spill type has been chosen for this range.
  DCHECK(HasNoSpillType());
  spills_at_definition_ = new (zone)
      SpillAtDefinitionList(gap_index, operand, spills_at_definition_);
}
bool TopLevelLiveRange::TryCommitSpillInDeferredBlock(
InstructionSequence* code, const InstructionOperand& spill_operand) {
if (!FLAG_turbo_preprocess_ranges || IsEmpty() || HasNoSpillType() ||
spill_operand.IsConstant() || spill_operand.IsImmediate()) {
return false;
}
int count = 0;
for (const LiveRange* child = this; child != nullptr; child = child->next()) {
int first_instr = child->Start().ToInstructionIndex();
// If the range starts at instruction end, the first instruction index is
// the next one.
if (!child->Start().IsGapPosition() && !child->Start().IsStart()) {
++first_instr;
}
// We only look at where the range starts. It doesn't matter where it ends:
// if it ends past this block, then either there is a phi there already,
// or ResolveControlFlow will adapt the last instruction gap of this block
// as if there were a phi. In either case, data flow will be correct.
const InstructionBlock* block = code->GetInstructionBlock(first_instr);
// If we have slot uses in a subrange, bail out, because we need the value
// on the stack before that use.
bool has_slot_use = child->NextSlotPosition(child->Start()) != nullptr;
if (!block->IsDeferred()) {
if (child->spilled() || has_slot_use) {
TRACE(
"Live Range %d must be spilled at definition: found a "
"slot-requiring non-deferred child range %d.\n",
TopLevel()->vreg(), child->relative_id());
return false;
}
} else {
if (child->spilled() || has_slot_use) ++count;
}
}
if (count == 0) return false;
spill_start_index_ = -1;
spilled_in_deferred_blocks_ = true;
TRACE("Live Range %d will be spilled only in deferred blocks.\n", vreg());
// If we have ranges that aren't spilled but require the operand on the stack,
// make sure we insert the spill.
for (const LiveRange* child = this; child != nullptr; child = child->next()) {
if (!child->spilled() &&
child->NextSlotPosition(child->Start()) != nullptr) {
auto instr = code->InstructionAt(child->Start().ToInstructionIndex());
// Insert spill at the end to let live range connections happen at START.
auto move =
instr->GetOrCreateParallelMove(Instruction::END, code->zone());
InstructionOperand assigned = child->GetAssignedOperand();
if (TopLevel()->has_slot_use()) {
bool found = false;
for (auto move_op : *move) {
if (move_op->IsEliminated()) continue;
if (move_op->source().Equals(assigned) &&
move_op->destination().Equals(spill_operand)) {
found = true;
break;
}
}
if (found) continue;
} }
DCHECK(nullptr != after);
// Partition original use intervals to the two live ranges. move->AddMove(assigned, spill_operand);
auto before = current; }
result->last_interval_ = }
(last_interval_ == before)
? after // Only interval in the range after split.
: last_interval_; // Last interval of the original range.
result->first_interval_ = after;
last_interval_ = before;
// Find the last use position before the split and the first use return true;
// position after it. }
auto use_after = first_pos_;
UsePosition* use_before = nullptr;
if (split_at_start) { void TopLevelLiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence,
// The split position coincides with the beginning of a use interval (the const InstructionOperand& op,
// end of a lifetime hole). Use at this position should be attributed to bool might_be_duplicated) {
// the split child because split child owns use interval covering it. DCHECK_IMPLIES(op.IsConstant(), spills_at_definition_ == nullptr);
while (use_after != nullptr && use_after->pos() < position) { auto zone = sequence->zone();
use_before = use_after;
use_after = use_after->next(); for (auto to_spill = spills_at_definition_; to_spill != nullptr;
to_spill = to_spill->next) {
auto instr = sequence->InstructionAt(to_spill->gap_index);
auto move = instr->GetOrCreateParallelMove(Instruction::START, zone);
// Skip insertion if it's possible that the move exists already as a
// constraint move from a fixed output register to a slot.
if (might_be_duplicated) {
bool found = false;
for (auto move_op : *move) {
if (move_op->IsEliminated()) continue;
if (move_op->source().Equals(*to_spill->operand) &&
move_op->destination().Equals(op)) {
found = true;
break;
} }
} else {
while (use_after != nullptr && use_after->pos() <= position) {
use_before = use_after;
use_after = use_after->next();
} }
if (found) continue;
} }
move->AddMove(*to_spill->operand, op);
// Partition original use positions to the two live ranges.
if (use_before != nullptr) {
use_before->set_next(nullptr);
} else {
first_pos_ = nullptr;
} }
result->first_pos_ = use_after; }
// Discard cached iteration state. It might be pointing
// to the use that no longer belongs to this live range.
last_processed_use_ = nullptr;
current_interval_ = nullptr;
// Link the new live range in the chain before any of the other void TopLevelLiveRange::SetSpillOperand(InstructionOperand* operand) {
// ranges linked from the range before the split. DCHECK(HasNoSpillType());
result->parent_ = (parent_ == nullptr) ? this : parent_; DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());
result->next_ = next_; set_spill_type(SpillType::kSpillOperand);
next_ = result; spill_operand_ = operand;
}
// Invalidate size and weight of this range. The child range has them
// invalid at construction. void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) {
size_ = kInvalidSize; DCHECK(!HasSpillOperand());
weight_ = kInvalidWeight; DCHECK(spill_range);
#ifdef DEBUG spill_range_ = spill_range;
Verify(); }
result->Verify();
#endif
AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const {
auto spill_range = GetSpillRange();
int index = spill_range->assigned_slot();
switch (kind()) {
case GENERAL_REGISTERS:
return StackSlotOperand(machine_type(), index);
case DOUBLE_REGISTERS:
return DoubleStackSlotOperand(machine_type(), index);
}
UNREACHABLE();
return StackSlotOperand(kMachNone, 0);
} }
void LiveRange::Splinter(LifetimePosition start, LifetimePosition end, void TopLevelLiveRange::Splinter(LifetimePosition start, LifetimePosition end,
LiveRange* result, Zone* zone) { TopLevelLiveRange* result, Zone* zone) {
DCHECK(start != Start() || end != End()); DCHECK(start != Start() || end != End());
DCHECK(start < end); DCHECK(start < end);
...@@ -678,21 +821,21 @@ void LiveRange::Splinter(LifetimePosition start, LifetimePosition end, ...@@ -678,21 +821,21 @@ void LiveRange::Splinter(LifetimePosition start, LifetimePosition end,
if (start <= Start()) { if (start <= Start()) {
DCHECK(end < End()); DCHECK(end < End());
SplitAt(end, result, zone); DetachAt(end, result, zone);
next_ = nullptr; next_ = nullptr;
} else if (end >= End()) { } else if (end >= End()) {
DCHECK(start > Start()); DCHECK(start > Start());
SplitAt(start, result, zone); DetachAt(start, result, zone);
next_ = nullptr; next_ = nullptr;
} else { } else {
DCHECK(start < End() && Start() < end); DCHECK(start < End() && Start() < end);
const int kInvalidId = std::numeric_limits<int>::max(); const int kInvalidId = std::numeric_limits<int>::max();
SplitAt(start, result, zone); DetachAt(start, result, zone);
LiveRange end_part(kInvalidId, this->machine_type()); LiveRange end_part(kInvalidId, this->machine_type(), nullptr);
result->SplitAt(end, &end_part, zone); result->DetachAt(end, &end_part, zone);
next_ = end_part.next_; next_ = end_part.next_;
last_interval_->set_next(end_part.first_interval_); last_interval_->set_next(end_part.first_interval_);
...@@ -709,16 +852,15 @@ void LiveRange::Splinter(LifetimePosition start, LifetimePosition end, ...@@ -709,16 +852,15 @@ void LiveRange::Splinter(LifetimePosition start, LifetimePosition end,
} }
} }
result->next_ = nullptr; result->next_ = nullptr;
result->parent_ = nullptr; result->top_level_ = result;
result->SetSplinteredFrom(this); result->SetSplinteredFrom(this);
} }
void LiveRange::SetSplinteredFrom(LiveRange* splinter_parent) { void TopLevelLiveRange::SetSplinteredFrom(TopLevelLiveRange* splinter_parent) {
// The splinter parent is always the original "Top". // The splinter parent is always the original "Top".
DCHECK(splinter_parent->Start() < Start()); DCHECK(splinter_parent->Start() < Start());
DCHECK(!splinter_parent->IsChild());
splintered_from_ = splinter_parent; splintered_from_ = splinter_parent;
if (!HasSpillOperand()) { if (!HasSpillOperand()) {
...@@ -727,38 +869,20 @@ void LiveRange::SetSplinteredFrom(LiveRange* splinter_parent) { ...@@ -727,38 +869,20 @@ void LiveRange::SetSplinteredFrom(LiveRange* splinter_parent) {
} }
void LiveRange::AppendChild(LiveRange* other) { void TopLevelLiveRange::UpdateSpillRangePostMerge(TopLevelLiveRange* merged) {
DCHECK(!other->IsChild());
next_ = other;
other->UpdateParentForAllChildren(TopLevel());
TopLevel()->UpdateSpillRangePostMerge(other);
}
void LiveRange::UpdateSpillRangePostMerge(LiveRange* merged) {
DCHECK(!IsChild());
DCHECK(merged->TopLevel() == this); DCHECK(merged->TopLevel() == this);
if (HasNoSpillType() && merged->HasSpillRange()) { if (HasNoSpillType() && merged->HasSpillRange()) {
set_spill_type(merged->spill_type()); set_spill_type(merged->spill_type());
DCHECK(GetSpillRange()->live_ranges().size() > 0); DCHECK(GetSpillRange()->live_ranges().size() > 0);
merged->spill_range_ = nullptr; merged->spill_range_ = nullptr;
merged->bits_ = SpillTypeField::update(merged->bits_, merged->bits_ =
LiveRange::SpillType::kNoSpillType); SpillTypeField::update(merged->bits_, SpillType::kNoSpillType);
}
}
void LiveRange::UpdateParentForAllChildren(LiveRange* new_parent) {
LiveRange* child = this;
for (; child != nullptr; child = child->next()) {
child->parent_ = new_parent;
} }
} }
LiveRange* LiveRange::GetLastChild() { LiveRange* TopLevelLiveRange::GetLastChild() {
LiveRange* ret = this; LiveRange* ret = this;
for (; ret->next() != nullptr; ret = ret->next()) { for (; ret->next() != nullptr; ret = ret->next()) {
} }
...@@ -766,16 +890,17 @@ LiveRange* LiveRange::GetLastChild() { ...@@ -766,16 +890,17 @@ LiveRange* LiveRange::GetLastChild() {
} }
void LiveRange::Merge(LiveRange* other, RegisterAllocationData* data) { void TopLevelLiveRange::Merge(TopLevelLiveRange* other,
DCHECK(!IsChild()); RegisterAllocationData* data) {
DCHECK(!other->IsChild());
DCHECK(Start() < other->Start()); DCHECK(Start() < other->Start());
data->live_ranges()[other->vreg()] = nullptr;
LiveRange* last_other = other->GetLastChild(); LiveRange* last_other = other->GetLastChild();
LiveRange* last_me = GetLastChild(); LiveRange* last_me = GetLastChild();
// Simple case: we just append at the end. // Simple case: we just append at the end.
if (last_me->End() <= other->Start()) return last_me->AppendChild(other); if (last_me->End() <= other->Start()) return last_me->AppendAsChild(other);
DCHECK(last_me->End() > last_other->End()); DCHECK(last_me->End() > last_other->End());
...@@ -793,9 +918,8 @@ void LiveRange::Merge(LiveRange* other, RegisterAllocationData* data) { ...@@ -793,9 +918,8 @@ void LiveRange::Merge(LiveRange* other, RegisterAllocationData* data) {
// register allocation splitting. // register allocation splitting.
LiveRange* after = insertion_point->next(); LiveRange* after = insertion_point->next();
if (insertion_point->End() > other->Start()) { if (insertion_point->End() > other->Start()) {
LiveRange* new_after = data->NewChildRangeFor(insertion_point); LiveRange* new_after =
insertion_point->SplitAt(other->Start(), new_after, insertion_point->SplitAt(other->Start(), data->allocation_zone());
data->allocation_zone());
new_after->set_spilled(insertion_point->spilled()); new_after->set_spilled(insertion_point->spilled());
if (!new_after->spilled()) if (!new_after->spilled())
new_after->set_assigned_register(insertion_point->assigned_register()); new_after->set_assigned_register(insertion_point->assigned_register());
...@@ -809,27 +933,8 @@ void LiveRange::Merge(LiveRange* other, RegisterAllocationData* data) { ...@@ -809,27 +933,8 @@ void LiveRange::Merge(LiveRange* other, RegisterAllocationData* data) {
} }
// This implements an ordering on live ranges so that they are ordered by their void TopLevelLiveRange::ShortenTo(LifetimePosition start) {
// start positions. This is needed for the correctness of the register TRACE("Shorten live range %d to [%d\n", vreg(), start.value());
// allocation algorithm. If two live ranges start at the same offset then there
// is a tie breaker based on where the value is first used. This part of the
// ordering is merely a heuristic.
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
LifetimePosition start = Start();
LifetimePosition other_start = other->Start();
if (start == other_start) {
UsePosition* pos = first_pos();
if (pos == nullptr) return false;
UsePosition* other_pos = other->first_pos();
if (other_pos == nullptr) return true;
return pos->pos() < other_pos->pos();
}
return start < other_start;
}
void LiveRange::ShortenTo(LifetimePosition start) {
TRACE("Shorten live range %d to [%d\n", id_, start.value());
DCHECK(first_interval_ != nullptr); DCHECK(first_interval_ != nullptr);
DCHECK(first_interval_->start() <= start); DCHECK(first_interval_->start() <= start);
DCHECK(start < first_interval_->end()); DCHECK(start < first_interval_->end());
...@@ -837,9 +942,9 @@ void LiveRange::ShortenTo(LifetimePosition start) { ...@@ -837,9 +942,9 @@ void LiveRange::ShortenTo(LifetimePosition start) {
} }
void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end, void TopLevelLiveRange::EnsureInterval(LifetimePosition start,
Zone* zone) { LifetimePosition end, Zone* zone) {
TRACE("Ensure live range %d in interval [%d %d[\n", id_, start.value(), TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(),
end.value()); end.value());
auto new_end = end; auto new_end = end;
while (first_interval_ != nullptr && first_interval_->start() <= end) { while (first_interval_ != nullptr && first_interval_->start() <= end) {
...@@ -858,9 +963,9 @@ void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end, ...@@ -858,9 +963,9 @@ void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end,
} }
void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end, void TopLevelLiveRange::AddUseInterval(LifetimePosition start,
Zone* zone) { LifetimePosition end, Zone* zone) {
TRACE("Add to live range %d interval [%d %d[\n", id_, start.value(), TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(),
end.value()); end.value());
if (first_interval_ == nullptr) { if (first_interval_ == nullptr) {
auto interval = new (zone) UseInterval(start, end); auto interval = new (zone) UseInterval(start, end);
...@@ -885,9 +990,9 @@ void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end, ...@@ -885,9 +990,9 @@ void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end,
} }
void LiveRange::AddUsePosition(UsePosition* use_pos) { void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) {
auto pos = use_pos->pos(); auto pos = use_pos->pos();
TRACE("Add to live range %d use position %d\n", id_, pos.value()); TRACE("Add to live range %d use position %d\n", vreg(), pos.value());
UsePosition* prev_hint = nullptr; UsePosition* prev_hint = nullptr;
UsePosition* prev = nullptr; UsePosition* prev = nullptr;
auto current = first_pos_; auto current = first_pos_;
...@@ -911,100 +1016,6 @@ void LiveRange::AddUsePosition(UsePosition* use_pos) { ...@@ -911,100 +1016,6 @@ void LiveRange::AddUsePosition(UsePosition* use_pos) {
} }
void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
const InstructionOperand& spill_op) {
for (auto pos = first_pos(); pos != nullptr; pos = pos->next()) {
DCHECK(Start() <= pos->pos() && pos->pos() <= End());
if (!pos->HasOperand()) continue;
switch (pos->type()) {
case UsePositionType::kRequiresSlot:
DCHECK(spill_op.IsStackSlot() || spill_op.IsDoubleStackSlot());
InstructionOperand::ReplaceWith(pos->operand(), &spill_op);
break;
case UsePositionType::kRequiresRegister:
DCHECK(op.IsRegister() || op.IsDoubleRegister());
// Fall through.
case UsePositionType::kAny:
InstructionOperand::ReplaceWith(pos->operand(), &op);
break;
}
}
}
void LiveRange::SetUseHints(int register_index) {
for (auto pos = first_pos(); pos != nullptr; pos = pos->next()) {
if (!pos->HasOperand()) continue;
switch (pos->type()) {
case UsePositionType::kRequiresSlot:
break;
case UsePositionType::kRequiresRegister:
case UsePositionType::kAny:
pos->set_assigned_register(register_index);
break;
}
}
}
bool LiveRange::CanCover(LifetimePosition position) const {
if (IsEmpty()) return false;
return Start() <= position && position < End();
}
bool LiveRange::Covers(LifetimePosition position) const {
if (!CanCover(position)) return false;
auto start_search = FirstSearchIntervalForPosition(position);
for (auto interval = start_search; interval != nullptr;
interval = interval->next()) {
DCHECK(interval->next() == nullptr ||
interval->next()->start() >= interval->start());
AdvanceLastProcessedMarker(interval, position);
if (interval->Contains(position)) return true;
if (interval->start() > position) return false;
}
return false;
}
LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const {
auto b = other->first_interval();
if (b == nullptr) return LifetimePosition::Invalid();
auto advance_last_processed_up_to = b->start();
auto a = FirstSearchIntervalForPosition(b->start());
while (a != nullptr && b != nullptr) {
if (a->start() > other->End()) break;
if (b->start() > End()) break;
auto cur_intersection = a->Intersect(b);
if (cur_intersection.IsValid()) {
return cur_intersection;
}
if (a->start() < b->start()) {
a = a->next();
if (a == nullptr || a->start() > other->End()) break;
AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
} else {
b = b->next();
}
}
return LifetimePosition::Invalid();
}
unsigned LiveRange::GetSize() {
if (size_ == kInvalidSize) {
size_ = 0;
for (auto interval = first_interval(); interval != nullptr;
interval = interval->next()) {
size_ += (interval->end().value() - interval->start().value());
}
}
return static_cast<unsigned>(size_);
}
static bool AreUseIntervalsIntersecting(UseInterval* interval1, static bool AreUseIntervalsIntersecting(UseInterval* interval1,
UseInterval* interval2) { UseInterval* interval2) {
while (interval1 != nullptr && interval2 != nullptr) { while (interval1 != nullptr && interval2 != nullptr) {
...@@ -1027,9 +1038,10 @@ static bool AreUseIntervalsIntersecting(UseInterval* interval1, ...@@ -1027,9 +1038,10 @@ static bool AreUseIntervalsIntersecting(UseInterval* interval1,
std::ostream& operator<<(std::ostream& os, std::ostream& operator<<(std::ostream& os,
const PrintableLiveRange& printable_range) { const PrintableLiveRange& printable_range) {
const LiveRange* range = printable_range.range_; const LiveRange* range = printable_range.range_;
os << "Range: " << range->id() << " "; os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id()
if (range->is_phi()) os << "phi "; << " ";
if (range->is_non_loop_phi()) os << "nlphi "; if (range->TopLevel()->is_phi()) os << "phi ";
if (range->TopLevel()->is_non_loop_phi()) os << "nlphi ";
os << "{" << std::endl; os << "{" << std::endl;
auto interval = range->first_interval(); auto interval = range->first_interval();
...@@ -1053,7 +1065,7 @@ std::ostream& operator<<(std::ostream& os, ...@@ -1053,7 +1065,7 @@ std::ostream& operator<<(std::ostream& os,
} }
SpillRange::SpillRange(LiveRange* parent, Zone* zone) SpillRange::SpillRange(TopLevelLiveRange* parent, Zone* zone)
: live_ranges_(zone), : live_ranges_(zone),
assigned_slot_(kUnassignedSlot), assigned_slot_(kUnassignedSlot),
byte_width_(GetByteWidth(parent->machine_type())), byte_width_(GetByteWidth(parent->machine_type())),
...@@ -1061,12 +1073,11 @@ SpillRange::SpillRange(LiveRange* parent, Zone* zone) ...@@ -1061,12 +1073,11 @@ SpillRange::SpillRange(LiveRange* parent, Zone* zone)
// Spill ranges are created for top level, non-splintered ranges. This is so // Spill ranges are created for top level, non-splintered ranges. This is so
// that, when merging decisions are made, we consider the full extent of the // that, when merging decisions are made, we consider the full extent of the
// virtual register, and avoid clobbering it. // virtual register, and avoid clobbering it.
DCHECK(!parent->IsChild());
DCHECK(!parent->IsSplinter()); DCHECK(!parent->IsSplinter());
UseInterval* result = nullptr; UseInterval* result = nullptr;
UseInterval* node = nullptr; UseInterval* node = nullptr;
// Copy the intervals for all ranges. // Copy the intervals for all ranges.
for (auto range = parent; range != nullptr; range = range->next()) { for (LiveRange* range = parent; range != nullptr; range = range->next()) {
auto src = range->first_interval(); auto src = range->first_interval();
while (src != nullptr) { while (src != nullptr) {
auto new_node = new (zone) UseInterval(src->start(), src->end()); auto new_node = new (zone) UseInterval(src->start(), src->end());
...@@ -1227,7 +1238,7 @@ MachineType RegisterAllocationData::MachineTypeFor(int virtual_register) { ...@@ -1227,7 +1238,7 @@ MachineType RegisterAllocationData::MachineTypeFor(int virtual_register) {
} }
LiveRange* RegisterAllocationData::LiveRangeFor(int index) { TopLevelLiveRange* RegisterAllocationData::GetOrCreateLiveRangeFor(int index) {
if (index >= static_cast<int>(live_ranges().size())) { if (index >= static_cast<int>(live_ranges().size())) {
live_ranges().resize(index + 1, nullptr); live_ranges().resize(index + 1, nullptr);
} }
...@@ -1240,27 +1251,26 @@ LiveRange* RegisterAllocationData::LiveRangeFor(int index) { ...@@ -1240,27 +1251,26 @@ LiveRange* RegisterAllocationData::LiveRangeFor(int index) {
} }
LiveRange* RegisterAllocationData::NewLiveRange(int index, TopLevelLiveRange* RegisterAllocationData::NewLiveRange(
MachineType machine_type) { int index, MachineType machine_type) {
return new (allocation_zone()) LiveRange(index, machine_type); return new (allocation_zone()) TopLevelLiveRange(index, machine_type);
} }
LiveRange* RegisterAllocationData::NextLiveRange(MachineType machine_type) { int RegisterAllocationData::GetNextLiveRangeId() {
int vreg = virtual_register_count_++; int vreg = virtual_register_count_++;
if (vreg >= static_cast<int>(live_ranges().size())) { if (vreg >= static_cast<int>(live_ranges().size())) {
live_ranges().resize(vreg + 1, nullptr); live_ranges().resize(vreg + 1, nullptr);
} }
LiveRange* ret = NewLiveRange(vreg, machine_type); return vreg;
return ret;
} }
LiveRange* RegisterAllocationData::NewChildRangeFor(LiveRange* range) { TopLevelLiveRange* RegisterAllocationData::NextLiveRange(
auto child = NextLiveRange(range->machine_type()); MachineType machine_type) {
DCHECK_NULL(live_ranges()[child->id()]); int vreg = GetNextLiveRangeId();
live_ranges()[child->id()] = child; TopLevelLiveRange* ret = NewLiveRange(vreg, machine_type);
return child; return ret;
} }
...@@ -1284,6 +1294,12 @@ RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor( ...@@ -1284,6 +1294,12 @@ RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
} }
RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
TopLevelLiveRange* top_range) {
return GetPhiMapValueFor(top_range->vreg());
}
bool RegisterAllocationData::ExistsUseWithoutDefinition() { bool RegisterAllocationData::ExistsUseWithoutDefinition() {
bool found = false; bool found = false;
BitVector::Iterator iterator(live_in_sets()[0]); BitVector::Iterator iterator(live_in_sets()[0]);
...@@ -1292,7 +1308,7 @@ bool RegisterAllocationData::ExistsUseWithoutDefinition() { ...@@ -1292,7 +1308,7 @@ bool RegisterAllocationData::ExistsUseWithoutDefinition() {
int operand_index = iterator.Current(); int operand_index = iterator.Current();
PrintF("Register allocator error: live v%d reached first block.\n", PrintF("Register allocator error: live v%d reached first block.\n",
operand_index); operand_index);
LiveRange* range = LiveRangeFor(operand_index); LiveRange* range = GetOrCreateLiveRangeFor(operand_index);
PrintF(" (first use is at %d)\n", range->first_pos()->pos().value()); PrintF(" (first use is at %d)\n", range->first_pos()->pos().value());
if (debug_name() == nullptr) { if (debug_name() == nullptr) {
PrintF("\n"); PrintF("\n");
...@@ -1306,8 +1322,7 @@ bool RegisterAllocationData::ExistsUseWithoutDefinition() { ...@@ -1306,8 +1322,7 @@ bool RegisterAllocationData::ExistsUseWithoutDefinition() {
SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange( SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange(
LiveRange* range) { TopLevelLiveRange* range) {
DCHECK(!range->IsChild());
DCHECK(!range->HasSpillOperand()); DCHECK(!range->HasSpillOperand());
SpillRange* spill_range = range->GetAllocatedSpillRange(); SpillRange* spill_range = range->GetAllocatedSpillRange();
...@@ -1315,7 +1330,7 @@ SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange( ...@@ -1315,7 +1330,7 @@ SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange(
DCHECK(!range->IsSplinter()); DCHECK(!range->IsSplinter());
spill_range = new (allocation_zone()) SpillRange(range, allocation_zone()); spill_range = new (allocation_zone()) SpillRange(range, allocation_zone());
} }
range->set_spill_type(LiveRange::SpillType::kSpillRange); range->set_spill_type(TopLevelLiveRange::SpillType::kSpillRange);
spill_ranges().insert(spill_range); spill_ranges().insert(spill_range);
return spill_range; return spill_range;
...@@ -1323,9 +1338,8 @@ SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange( ...@@ -1323,9 +1338,8 @@ SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange(
SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange( SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange(
LiveRange* range) { TopLevelLiveRange* range) {
DCHECK(!range->HasSpillOperand()); DCHECK(!range->HasSpillOperand());
DCHECK(!range->IsChild());
DCHECK(!range->IsSplinter()); DCHECK(!range->IsSplinter());
auto spill_range = auto spill_range =
new (allocation_zone()) SpillRange(range, allocation_zone()); new (allocation_zone()) SpillRange(range, allocation_zone());
...@@ -1404,8 +1418,8 @@ void RegisterAllocationData::Print(const MoveOperands* move) { ...@@ -1404,8 +1418,8 @@ void RegisterAllocationData::Print(const MoveOperands* move) {
void RegisterAllocationData::Print(const SpillRange* spill_range) { void RegisterAllocationData::Print(const SpillRange* spill_range) {
OFStream os(stdout); OFStream os(stdout);
os << "{" << std::endl; os << "{" << std::endl;
for (LiveRange* range : spill_range->live_ranges()) { for (TopLevelLiveRange* range : spill_range->live_ranges()) {
os << range->id() << " "; os << range->vreg() << " ";
} }
os << std::endl; os << std::endl;
...@@ -1451,7 +1465,7 @@ InstructionOperand* ConstraintBuilder::AllocateFixed( ...@@ -1451,7 +1465,7 @@ InstructionOperand* ConstraintBuilder::AllocateFixed(
InstructionOperand::ReplaceWith(operand, &allocated); InstructionOperand::ReplaceWith(operand, &allocated);
if (is_tagged) { if (is_tagged) {
TRACE("Fixed reg is tagged at %d\n", pos); TRACE("Fixed reg is tagged at %d\n", pos);
auto instr = InstructionAt(pos); auto instr = code()->InstructionAt(pos);
if (instr->HasReferenceMap()) { if (instr->HasReferenceMap()) {
instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand)); instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand));
} }
...@@ -1483,13 +1497,13 @@ void ConstraintBuilder::MeetRegisterConstraints(const InstructionBlock* block) { ...@@ -1483,13 +1497,13 @@ void ConstraintBuilder::MeetRegisterConstraints(const InstructionBlock* block) {
void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock( void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
const InstructionBlock* block) { const InstructionBlock* block) {
int end = block->last_instruction_index(); int end = block->last_instruction_index();
auto last_instruction = InstructionAt(end); auto last_instruction = code()->InstructionAt(end);
for (size_t i = 0; i < last_instruction->OutputCount(); i++) { for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
auto output_operand = last_instruction->OutputAt(i); auto output_operand = last_instruction->OutputAt(i);
DCHECK(!output_operand->IsConstant()); DCHECK(!output_operand->IsConstant());
auto output = UnallocatedOperand::cast(output_operand); auto output = UnallocatedOperand::cast(output_operand);
int output_vreg = output->virtual_register(); int output_vreg = output->virtual_register();
auto range = LiveRangeFor(output_vreg); auto range = data()->GetOrCreateLiveRangeFor(output_vreg);
bool assigned = false; bool assigned = false;
if (output->HasFixedPolicy()) { if (output->HasFixedPolicy()) {
AllocateFixed(output, -1, false); AllocateFixed(output, -1, false);
...@@ -1527,7 +1541,7 @@ void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock( ...@@ -1527,7 +1541,7 @@ void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
void ConstraintBuilder::MeetConstraintsAfter(int instr_index) { void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
auto first = InstructionAt(instr_index); auto first = code()->InstructionAt(instr_index);
// Handle fixed temporaries. // Handle fixed temporaries.
for (size_t i = 0; i < first->TempCount(); i++) { for (size_t i = 0; i < first->TempCount(); i++) {
auto temp = UnallocatedOperand::cast(first->TempAt(i)); auto temp = UnallocatedOperand::cast(first->TempAt(i));
...@@ -1538,18 +1552,19 @@ void ConstraintBuilder::MeetConstraintsAfter(int instr_index) { ...@@ -1538,18 +1552,19 @@ void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
InstructionOperand* output = first->OutputAt(i); InstructionOperand* output = first->OutputAt(i);
if (output->IsConstant()) { if (output->IsConstant()) {
int output_vreg = ConstantOperand::cast(output)->virtual_register(); int output_vreg = ConstantOperand::cast(output)->virtual_register();
auto range = LiveRangeFor(output_vreg); auto range = data()->GetOrCreateLiveRangeFor(output_vreg);
range->SetSpillStartIndex(instr_index + 1); range->SetSpillStartIndex(instr_index + 1);
range->SetSpillOperand(output); range->SetSpillOperand(output);
continue; continue;
} }
auto first_output = UnallocatedOperand::cast(output); auto first_output = UnallocatedOperand::cast(output);
auto range = LiveRangeFor(first_output->virtual_register()); auto range =
data()->GetOrCreateLiveRangeFor(first_output->virtual_register());
bool assigned = false; bool assigned = false;
if (first_output->HasFixedPolicy()) { if (first_output->HasFixedPolicy()) {
int output_vreg = first_output->virtual_register(); int output_vreg = first_output->virtual_register();
UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg); UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
bool is_tagged = IsReference(output_vreg); bool is_tagged = code()->IsReference(output_vreg);
AllocateFixed(first_output, instr_index, is_tagged); AllocateFixed(first_output, instr_index, is_tagged);
// This value is produced on the stack, we never need to spill it. // This value is produced on the stack, we never need to spill it.
...@@ -1575,7 +1590,7 @@ void ConstraintBuilder::MeetConstraintsAfter(int instr_index) { ...@@ -1575,7 +1590,7 @@ void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
void ConstraintBuilder::MeetConstraintsBefore(int instr_index) { void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
auto second = InstructionAt(instr_index); auto second = code()->InstructionAt(instr_index);
// Handle fixed input operands of second instruction. // Handle fixed input operands of second instruction.
for (size_t i = 0; i < second->InputCount(); i++) { for (size_t i = 0; i < second->InputCount(); i++) {
auto input = second->InputAt(i); auto input = second->InputAt(i);
...@@ -1584,7 +1599,7 @@ void ConstraintBuilder::MeetConstraintsBefore(int instr_index) { ...@@ -1584,7 +1599,7 @@ void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
if (cur_input->HasFixedPolicy()) { if (cur_input->HasFixedPolicy()) {
int input_vreg = cur_input->virtual_register(); int input_vreg = cur_input->virtual_register();
UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg); UnallocatedOperand input_copy(UnallocatedOperand::ANY, input_vreg);
bool is_tagged = IsReference(input_vreg); bool is_tagged = code()->IsReference(input_vreg);
AllocateFixed(cur_input, instr_index, is_tagged); AllocateFixed(cur_input, instr_index, is_tagged);
data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input); data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
} }
...@@ -1604,13 +1619,14 @@ void ConstraintBuilder::MeetConstraintsBefore(int instr_index) { ...@@ -1604,13 +1619,14 @@ void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
cur_input->set_virtual_register(second_output->virtual_register()); cur_input->set_virtual_register(second_output->virtual_register());
auto gap_move = data()->AddGapMove(instr_index, Instruction::END, auto gap_move = data()->AddGapMove(instr_index, Instruction::END,
input_copy, *cur_input); input_copy, *cur_input);
if (IsReference(input_vreg) && !IsReference(output_vreg)) { if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) {
if (second->HasReferenceMap()) { if (second->HasReferenceMap()) {
RegisterAllocationData::DelayedReference delayed_reference = { RegisterAllocationData::DelayedReference delayed_reference = {
second->reference_map(), &gap_move->source()}; second->reference_map(), &gap_move->source()};
data()->delayed_references().push_back(delayed_reference); data()->delayed_references().push_back(delayed_reference);
} }
} else if (!IsReference(input_vreg) && IsReference(output_vreg)) { } else if (!code()->IsReference(input_vreg) &&
code()->IsReference(output_vreg)) {
// The input is assumed to immediately have a tagged representation, // The input is assumed to immediately have a tagged representation,
// before the pointer map can be used. I.e. the pointer map at the // before the pointer map can be used. I.e. the pointer map at the
// instruction will include the output operand (whose value at the // instruction will include the output operand (whose value at the
...@@ -1643,10 +1659,11 @@ void ConstraintBuilder::ResolvePhis(const InstructionBlock* block) { ...@@ -1643,10 +1659,11 @@ void ConstraintBuilder::ResolvePhis(const InstructionBlock* block) {
auto move = data()->AddGapMove(cur_block->last_instruction_index(), auto move = data()->AddGapMove(cur_block->last_instruction_index(),
Instruction::END, input, output); Instruction::END, input, output);
map_value->AddOperand(&move->destination()); map_value->AddOperand(&move->destination());
DCHECK(!InstructionAt(cur_block->last_instruction_index()) DCHECK(!code()
->InstructionAt(cur_block->last_instruction_index())
->HasReferenceMap()); ->HasReferenceMap());
} }
auto live_range = LiveRangeFor(phi_vreg); auto live_range = data()->GetOrCreateLiveRangeFor(phi_vreg);
int gap_index = block->first_instruction_index(); int gap_index = block->first_instruction_index();
live_range->SpillAtDefinition(allocation_zone(), gap_index, &output); live_range->SpillAtDefinition(allocation_zone(), gap_index, &output);
live_range->SetSpillStartIndex(gap_index); live_range->SetSpillStartIndex(gap_index);
...@@ -1702,7 +1719,7 @@ void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block, ...@@ -1702,7 +1719,7 @@ void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
BitVector::Iterator iterator(live_out); BitVector::Iterator iterator(live_out);
while (!iterator.Done()) { while (!iterator.Done()) {
int operand_index = iterator.Current(); int operand_index = iterator.Current();
auto range = LiveRangeFor(operand_index); auto range = data()->GetOrCreateLiveRangeFor(operand_index);
range->AddUseInterval(start, end, allocation_zone()); range->AddUseInterval(start, end, allocation_zone());
iterator.Advance(); iterator.Advance();
} }
...@@ -1714,7 +1731,7 @@ int LiveRangeBuilder::FixedDoubleLiveRangeID(int index) { ...@@ -1714,7 +1731,7 @@ int LiveRangeBuilder::FixedDoubleLiveRangeID(int index) {
} }
LiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) { TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
DCHECK(index < config()->num_general_registers()); DCHECK(index < config()->num_general_registers());
auto result = data()->fixed_live_ranges()[index]; auto result = data()->fixed_live_ranges()[index];
if (result == nullptr) { if (result == nullptr) {
...@@ -1729,7 +1746,7 @@ LiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) { ...@@ -1729,7 +1746,7 @@ LiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
} }
LiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) { TopLevelLiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) {
DCHECK(index < config()->num_aliased_double_registers()); DCHECK(index < config()->num_aliased_double_registers());
auto result = data()->fixed_double_live_ranges()[index]; auto result = data()->fixed_double_live_ranges()[index];
if (result == nullptr) { if (result == nullptr) {
...@@ -1743,11 +1760,13 @@ LiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) { ...@@ -1743,11 +1760,13 @@ LiveRange* LiveRangeBuilder::FixedDoubleLiveRangeFor(int index) {
} }
LiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) { TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
if (operand->IsUnallocated()) { if (operand->IsUnallocated()) {
return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register()); return data()->GetOrCreateLiveRangeFor(
UnallocatedOperand::cast(operand)->virtual_register());
} else if (operand->IsConstant()) { } else if (operand->IsConstant()) {
return LiveRangeFor(ConstantOperand::cast(operand)->virtual_register()); return data()->GetOrCreateLiveRangeFor(
ConstantOperand::cast(operand)->virtual_register());
} else if (operand->IsRegister()) { } else if (operand->IsRegister()) {
return FixedLiveRangeFor(RegisterOperand::cast(operand)->index()); return FixedLiveRangeFor(RegisterOperand::cast(operand)->index());
} else if (operand->IsDoubleRegister()) { } else if (operand->IsDoubleRegister()) {
...@@ -1877,7 +1896,7 @@ void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block, ...@@ -1877,7 +1896,7 @@ void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
int vreg = unalloc->virtual_register(); int vreg = unalloc->virtual_register();
live->Add(vreg); live->Add(vreg);
if (unalloc->HasSlotPolicy()) { if (unalloc->HasSlotPolicy()) {
LiveRangeFor(vreg)->set_has_slot_use(true); data()->GetOrCreateLiveRangeFor(vreg)->set_has_slot_use(true);
} }
} }
Use(block_start_position, use_pos, input); Use(block_start_position, use_pos, input);
...@@ -1923,7 +1942,7 @@ void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block, ...@@ -1923,7 +1942,7 @@ void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
int phi_vreg = -1; int phi_vreg = -1;
if (to.IsUnallocated()) { if (to.IsUnallocated()) {
int to_vreg = UnallocatedOperand::cast(to).virtual_register(); int to_vreg = UnallocatedOperand::cast(to).virtual_register();
auto to_range = LiveRangeFor(to_vreg); auto to_range = data()->GetOrCreateLiveRangeFor(to_vreg);
if (to_range->is_phi()) { if (to_range->is_phi()) {
phi_vreg = to_vreg; phi_vreg = to_vreg;
if (to_range->is_non_loop_phi()) { if (to_range->is_non_loop_phi()) {
...@@ -2008,7 +2027,7 @@ void LiveRangeBuilder::ProcessLoopHeader(const InstructionBlock* block, ...@@ -2008,7 +2027,7 @@ void LiveRangeBuilder::ProcessLoopHeader(const InstructionBlock* block,
code()->LastLoopInstructionIndex(block)).NextFullStart(); code()->LastLoopInstructionIndex(block)).NextFullStart();
while (!iterator.Done()) { while (!iterator.Done()) {
int operand_index = iterator.Current(); int operand_index = iterator.Current();
auto range = LiveRangeFor(operand_index); TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
range->EnsureInterval(start, end, allocation_zone()); range->EnsureInterval(start, end, allocation_zone());
iterator.Advance(); iterator.Advance();
} }
...@@ -2090,7 +2109,7 @@ void LiveRangeBuilder::Verify() const { ...@@ -2090,7 +2109,7 @@ void LiveRangeBuilder::Verify() const {
for (auto& hint : phi_hints_) { for (auto& hint : phi_hints_) {
CHECK(hint.second->IsResolved()); CHECK(hint.second->IsResolved());
} }
for (auto current : data()->live_ranges()) { for (LiveRange* current : data()->live_ranges()) {
if (current != nullptr) current->Verify(); if (current != nullptr) current->Verify();
} }
} }
...@@ -2105,8 +2124,9 @@ RegisterAllocator::RegisterAllocator(RegisterAllocationData* data, ...@@ -2105,8 +2124,9 @@ RegisterAllocator::RegisterAllocator(RegisterAllocationData* data,
LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
LifetimePosition pos) { LifetimePosition pos) {
DCHECK(!range->IsFixed()); DCHECK(!range->TopLevel()->IsFixed());
TRACE("Splitting live range %d at %d\n", range->id(), pos.value()); TRACE("Splitting live range %d:%d at %d\n", range->TopLevel()->vreg(),
range->relative_id(), pos.value());
if (pos <= range->Start()) return range; if (pos <= range->Start()) return range;
...@@ -2116,8 +2136,7 @@ LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, ...@@ -2116,8 +2136,7 @@ LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
(GetInstructionBlock(code(), pos)->last_instruction_index() != (GetInstructionBlock(code(), pos)->last_instruction_index() !=
pos.ToInstructionIndex())); pos.ToInstructionIndex()));
auto result = data()->NewChildRangeFor(range); LiveRange* result = range->SplitAt(pos, allocation_zone());
range->SplitAt(pos, result, allocation_zone());
return result; return result;
} }
...@@ -2125,9 +2144,10 @@ LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, ...@@ -2125,9 +2144,10 @@ LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
LiveRange* RegisterAllocator::SplitBetween(LiveRange* range, LiveRange* RegisterAllocator::SplitBetween(LiveRange* range,
LifetimePosition start, LifetimePosition start,
LifetimePosition end) { LifetimePosition end) {
DCHECK(!range->IsFixed()); DCHECK(!range->TopLevel()->IsFixed());
TRACE("Splitting live range %d in position between [%d, %d]\n", range->id(), TRACE("Splitting live range %d:%d in position between [%d, %d]\n",
start.value(), end.value()); range->TopLevel()->vreg(), range->relative_id(), start.value(),
end.value());
auto split_pos = FindOptimalSplitPos(start, end); auto split_pos = FindOptimalSplitPos(start, end);
DCHECK(split_pos >= start); DCHECK(split_pos >= start);
...@@ -2205,8 +2225,9 @@ LifetimePosition RegisterAllocator::FindOptimalSpillingPos( ...@@ -2205,8 +2225,9 @@ LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
void RegisterAllocator::Spill(LiveRange* range) { void RegisterAllocator::Spill(LiveRange* range) {
DCHECK(!range->spilled()); DCHECK(!range->spilled());
TRACE("Spilling live range %d\n", range->id()); TopLevelLiveRange* first = range->TopLevel();
auto first = range->TopLevel(); TRACE("Spilling live range %d:%d\n", first->vreg(), range->relative_id());
if (first->HasNoSpillType()) { if (first->HasNoSpillType()) {
data()->AssignSpillRangeToLiveRange(first); data()->AssignSpillRangeToLiveRange(first);
} }
...@@ -2214,7 +2235,8 @@ void RegisterAllocator::Spill(LiveRange* range) { ...@@ -2214,7 +2235,8 @@ void RegisterAllocator::Spill(LiveRange* range) {
} }
const ZoneVector<LiveRange*>& RegisterAllocator::GetFixedRegisters() const { const ZoneVector<TopLevelLiveRange*>& RegisterAllocator::GetFixedRegisters()
const {
return mode() == DOUBLE_REGISTERS ? data()->fixed_double_live_ranges() return mode() == DOUBLE_REGISTERS ? data()->fixed_double_live_ranges()
: data()->fixed_live_ranges(); : data()->fixed_live_ranges();
} }
...@@ -2251,7 +2273,7 @@ void LinearScanAllocator::AllocateRegisters() { ...@@ -2251,7 +2273,7 @@ void LinearScanAllocator::AllocateRegisters() {
DCHECK(active_live_ranges().empty()); DCHECK(active_live_ranges().empty());
DCHECK(inactive_live_ranges().empty()); DCHECK(inactive_live_ranges().empty());
for (auto range : data()->live_ranges()) { for (LiveRange* range : data()->live_ranges()) {
if (range == nullptr) continue; if (range == nullptr) continue;
if (range->kind() == mode()) { if (range->kind() == mode()) {
AddToUnhandledUnsorted(range); AddToUnhandledUnsorted(range);
...@@ -2277,10 +2299,12 @@ void LinearScanAllocator::AllocateRegisters() { ...@@ -2277,10 +2299,12 @@ void LinearScanAllocator::AllocateRegisters() {
#ifdef DEBUG #ifdef DEBUG
allocation_finger_ = position; allocation_finger_ = position;
#endif #endif
TRACE("Processing interval %d start=%d\n", current->id(), position.value()); TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(),
current->relative_id(), position.value());
if (!current->HasNoSpillType()) { if (current->IsTopLevel() && !current->TopLevel()->HasNoSpillType()) {
TRACE("Live range %d already has a spill operand\n", current->id()); TRACE("Live range %d:%d already has a spill operand\n",
current->TopLevel()->vreg(), current->relative_id());
auto next_pos = position; auto next_pos = position;
if (next_pos.IsGapPosition()) { if (next_pos.IsGapPosition()) {
next_pos = next_pos.NextStart(); next_pos = next_pos.NextStart();
...@@ -2300,7 +2324,8 @@ void LinearScanAllocator::AllocateRegisters() { ...@@ -2300,7 +2324,8 @@ void LinearScanAllocator::AllocateRegisters() {
} }
} }
if (TryReuseSpillForPhi(current)) continue; if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel()))
continue;
for (size_t i = 0; i < active_live_ranges().size(); ++i) { for (size_t i = 0; i < active_live_ranges().size(); ++i) {
auto cur_active = active_live_ranges()[i]; auto cur_active = active_live_ranges()[i];
...@@ -2340,20 +2365,22 @@ void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range, ...@@ -2340,20 +2365,22 @@ void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
data()->MarkAllocated(range->kind(), reg); data()->MarkAllocated(range->kind(), reg);
range->set_assigned_register(reg); range->set_assigned_register(reg);
range->SetUseHints(reg); range->SetUseHints(reg);
if (range->is_phi()) { if (range->IsTopLevel() && range->TopLevel()->is_phi()) {
data()->GetPhiMapValueFor(range->id())->set_assigned_register(reg); data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg);
} }
} }
void LinearScanAllocator::AddToActive(LiveRange* range) { void LinearScanAllocator::AddToActive(LiveRange* range) {
TRACE("Add live range %d to active\n", range->id()); TRACE("Add live range %d:%d to active\n", range->TopLevel()->vreg(),
range->relative_id());
active_live_ranges().push_back(range); active_live_ranges().push_back(range);
} }
void LinearScanAllocator::AddToInactive(LiveRange* range) { void LinearScanAllocator::AddToInactive(LiveRange* range) {
TRACE("Add live range %d to inactive\n", range->id()); TRACE("Add live range %d:%d to inactive\n", range->TopLevel()->vreg(),
range->relative_id());
inactive_live_ranges().push_back(range); inactive_live_ranges().push_back(range);
} }
...@@ -2366,13 +2393,15 @@ void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) { ...@@ -2366,13 +2393,15 @@ void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) {
--i) { --i) {
auto cur_range = unhandled_live_ranges().at(i); auto cur_range = unhandled_live_ranges().at(i);
if (!range->ShouldBeAllocatedBefore(cur_range)) continue; if (!range->ShouldBeAllocatedBefore(cur_range)) continue;
TRACE("Add live range %d to unhandled at %d\n", range->id(), i + 1); TRACE("Add live range %d:%d to unhandled at %d\n",
range->TopLevel()->vreg(), range->relative_id(), i + 1);
auto it = unhandled_live_ranges().begin() + (i + 1); auto it = unhandled_live_ranges().begin() + (i + 1);
unhandled_live_ranges().insert(it, range); unhandled_live_ranges().insert(it, range);
DCHECK(UnhandledIsSorted()); DCHECK(UnhandledIsSorted());
return; return;
} }
TRACE("Add live range %d to unhandled at start\n", range->id()); TRACE("Add live range %d:%d to unhandled at start\n",
range->TopLevel()->vreg(), range->relative_id());
unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range); unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range);
DCHECK(UnhandledIsSorted()); DCHECK(UnhandledIsSorted());
} }
...@@ -2381,7 +2410,8 @@ void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) { ...@@ -2381,7 +2410,8 @@ void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) {
void LinearScanAllocator::AddToUnhandledUnsorted(LiveRange* range) { void LinearScanAllocator::AddToUnhandledUnsorted(LiveRange* range) {
if (range == nullptr || range->IsEmpty()) return; if (range == nullptr || range->IsEmpty()) return;
DCHECK(!range->HasRegisterAssigned() && !range->spilled()); DCHECK(!range->HasRegisterAssigned() && !range->spilled());
TRACE("Add live range %d to unhandled unsorted at end\n", range->id()); TRACE("Add live range %d:%d to unhandled unsorted at end\n",
range->TopLevel()->vreg(), range->relative_id());
unhandled_live_ranges().push_back(range); unhandled_live_ranges().push_back(range);
} }
...@@ -2390,7 +2420,7 @@ static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) { ...@@ -2390,7 +2420,7 @@ static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) {
DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a)); DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a));
if (a->ShouldBeAllocatedBefore(b)) return false; if (a->ShouldBeAllocatedBefore(b)) return false;
if (b->ShouldBeAllocatedBefore(a)) return true; if (b->ShouldBeAllocatedBefore(a)) return true;
return a->id() < b->id(); return a->TopLevel()->vreg() < b->TopLevel()->vreg();
} }
...@@ -2417,27 +2447,31 @@ bool LinearScanAllocator::UnhandledIsSorted() { ...@@ -2417,27 +2447,31 @@ bool LinearScanAllocator::UnhandledIsSorted() {
void LinearScanAllocator::ActiveToHandled(LiveRange* range) { void LinearScanAllocator::ActiveToHandled(LiveRange* range) {
RemoveElement(&active_live_ranges(), range); RemoveElement(&active_live_ranges(), range);
TRACE("Moving live range %d from active to handled\n", range->id()); TRACE("Moving live range %d:%d from active to handled\n",
range->TopLevel()->vreg(), range->relative_id());
} }
void LinearScanAllocator::ActiveToInactive(LiveRange* range) { void LinearScanAllocator::ActiveToInactive(LiveRange* range) {
RemoveElement(&active_live_ranges(), range); RemoveElement(&active_live_ranges(), range);
inactive_live_ranges().push_back(range); inactive_live_ranges().push_back(range);
TRACE("Moving live range %d from active to inactive\n", range->id()); TRACE("Moving live range %d:%d from active to inactive\n",
range->TopLevel()->vreg(), range->relative_id());
} }
void LinearScanAllocator::InactiveToHandled(LiveRange* range) { void LinearScanAllocator::InactiveToHandled(LiveRange* range) {
RemoveElement(&inactive_live_ranges(), range); RemoveElement(&inactive_live_ranges(), range);
TRACE("Moving live range %d from inactive to handled\n", range->id()); TRACE("Moving live range %d:%d from inactive to handled\n",
range->TopLevel()->vreg(), range->relative_id());
} }
void LinearScanAllocator::InactiveToActive(LiveRange* range) { void LinearScanAllocator::InactiveToActive(LiveRange* range) {
RemoveElement(&inactive_live_ranges(), range); RemoveElement(&inactive_live_ranges(), range);
active_live_ranges().push_back(range); active_live_ranges().push_back(range);
TRACE("Moving live range %d from inactive to active\n", range->id()); TRACE("Moving live range %d:%d from inactive to active\n",
range->TopLevel()->vreg(), range->relative_id());
} }
...@@ -2463,14 +2497,17 @@ bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) { ...@@ -2463,14 +2497,17 @@ bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) {
int hint_register; int hint_register;
if (current->FirstHintPosition(&hint_register) != nullptr) { if (current->FirstHintPosition(&hint_register) != nullptr) {
TRACE("Found reg hint %s (free until [%d) for live range %d (end %d[).\n", TRACE(
"Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n",
RegisterName(hint_register), free_until_pos[hint_register].value(), RegisterName(hint_register), free_until_pos[hint_register].value(),
current->id(), current->End().value()); current->TopLevel()->vreg(), current->relative_id(),
current->End().value());
// The desired register is free until the end of the current live range. // The desired register is free until the end of the current live range.
if (free_until_pos[hint_register] >= current->End()) { if (free_until_pos[hint_register] >= current->End()) {
TRACE("Assigning preferred reg %s to live range %d\n", TRACE("Assigning preferred reg %s to live range %d:%d\n",
RegisterName(hint_register), current->id()); RegisterName(hint_register), current->TopLevel()->vreg(),
current->relative_id());
SetLiveRangeAssignedRegister(current, hint_register); SetLiveRangeAssignedRegister(current, hint_register);
return true; return true;
} }
...@@ -2501,8 +2538,8 @@ bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) { ...@@ -2501,8 +2538,8 @@ bool LinearScanAllocator::TryAllocateFreeReg(LiveRange* current) {
// Register reg is available at the range start and is free until // Register reg is available at the range start and is free until
// the range end. // the range end.
DCHECK(pos >= current->End()); DCHECK(pos >= current->End());
TRACE("Assigning free reg %s to live range %d\n", RegisterName(reg), TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg),
current->id()); current->TopLevel()->vreg(), current->relative_id());
SetLiveRangeAssignedRegister(current, reg); SetLiveRangeAssignedRegister(current, reg);
return true; return true;
...@@ -2527,7 +2564,8 @@ void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) { ...@@ -2527,7 +2564,8 @@ void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
for (auto range : active_live_ranges()) { for (auto range : active_live_ranges()) {
int cur_reg = range->assigned_register(); int cur_reg = range->assigned_register();
if (range->IsFixed() || !range->CanBeSpilled(current->Start())) { if (range->TopLevel()->IsFixed() ||
!range->CanBeSpilled(current->Start())) {
block_pos[cur_reg] = use_pos[cur_reg] = block_pos[cur_reg] = use_pos[cur_reg] =
LifetimePosition::GapFromInstructionIndex(0); LifetimePosition::GapFromInstructionIndex(0);
} else { } else {
...@@ -2546,7 +2584,7 @@ void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) { ...@@ -2546,7 +2584,7 @@ void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
auto next_intersection = range->FirstIntersection(current); auto next_intersection = range->FirstIntersection(current);
if (!next_intersection.IsValid()) continue; if (!next_intersection.IsValid()) continue;
int cur_reg = range->assigned_register(); int cur_reg = range->assigned_register();
if (range->IsFixed()) { if (range->TopLevel()->IsFixed()) {
block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection); block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]); use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
} else { } else {
...@@ -2580,8 +2618,8 @@ void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) { ...@@ -2580,8 +2618,8 @@ void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
// Register reg is not blocked for the whole range. // Register reg is not blocked for the whole range.
DCHECK(block_pos[reg] >= current->End()); DCHECK(block_pos[reg] >= current->End());
TRACE("Assigning blocked reg %s to live range %d\n", RegisterName(reg), TRACE("Assigning blocked reg %s to live range %d:%d\n", RegisterName(reg),
current->id()); current->TopLevel()->vreg(), current->relative_id());
SetLiveRangeAssignedRegister(current, reg); SetLiveRangeAssignedRegister(current, reg);
// This register was not free. Thus we need to find and spill // This register was not free. Thus we need to find and spill
...@@ -2621,7 +2659,7 @@ void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) { ...@@ -2621,7 +2659,7 @@ void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) {
for (size_t i = 0; i < inactive_live_ranges().size(); ++i) { for (size_t i = 0; i < inactive_live_ranges().size(); ++i) {
auto range = inactive_live_ranges()[i]; auto range = inactive_live_ranges()[i];
DCHECK(range->End() > current->Start()); DCHECK(range->End() > current->Start());
if (range->assigned_register() == reg && !range->IsFixed()) { if (range->assigned_register() == reg && !range->TopLevel()->IsFixed()) {
LifetimePosition next_intersection = range->FirstIntersection(current); LifetimePosition next_intersection = range->FirstIntersection(current);
if (next_intersection.IsValid()) { if (next_intersection.IsValid()) {
UsePosition* next_pos = range->NextRegisterPosition(current->Start()); UsePosition* next_pos = range->NextRegisterPosition(current->Start());
...@@ -2639,10 +2677,11 @@ void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) { ...@@ -2639,10 +2677,11 @@ void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) {
} }
bool LinearScanAllocator::TryReuseSpillForPhi(LiveRange* range) { bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) {
if (range->IsChild() || !range->is_phi()) return false; if (!range->is_phi()) return false;
DCHECK(!range->HasSpillOperand()); DCHECK(!range->HasSpillOperand());
auto phi_map_value = data()->GetPhiMapValueFor(range->id()); auto phi_map_value = data()->GetPhiMapValueFor(range);
auto phi = phi_map_value->phi(); auto phi = phi_map_value->phi();
auto block = phi_map_value->block(); auto block = phi_map_value->block();
// Count the number of spilled operands. // Count the number of spilled operands.
...@@ -2650,8 +2689,8 @@ bool LinearScanAllocator::TryReuseSpillForPhi(LiveRange* range) { ...@@ -2650,8 +2689,8 @@ bool LinearScanAllocator::TryReuseSpillForPhi(LiveRange* range) {
LiveRange* first_op = nullptr; LiveRange* first_op = nullptr;
for (size_t i = 0; i < phi->operands().size(); i++) { for (size_t i = 0; i < phi->operands().size(); i++) {
int op = phi->operands()[i]; int op = phi->operands()[i];
LiveRange* op_range = LiveRangeFor(op); LiveRange* op_range = data()->GetOrCreateLiveRangeFor(op);
if (!op_range->HasSpillRange()) continue; if (!op_range->TopLevel()->HasSpillRange()) continue;
auto pred = code()->InstructionBlockAt(block->predecessors()[i]); auto pred = code()->InstructionBlockAt(block->predecessors()[i]);
auto pred_end = LifetimePosition::InstructionFromInstructionIndex( auto pred_end = LifetimePosition::InstructionFromInstructionIndex(
pred->last_instruction_index()); pred->last_instruction_index());
...@@ -2674,11 +2713,11 @@ bool LinearScanAllocator::TryReuseSpillForPhi(LiveRange* range) { ...@@ -2674,11 +2713,11 @@ bool LinearScanAllocator::TryReuseSpillForPhi(LiveRange* range) {
// Try to merge the spilled operands and count the number of merged spilled // Try to merge the spilled operands and count the number of merged spilled
// operands. // operands.
DCHECK(first_op != nullptr); DCHECK(first_op != nullptr);
auto first_op_spill = first_op->GetSpillRange(); auto first_op_spill = first_op->TopLevel()->GetSpillRange();
size_t num_merged = 1; size_t num_merged = 1;
for (size_t i = 1; i < phi->operands().size(); i++) { for (size_t i = 1; i < phi->operands().size(); i++) {
int op = phi->operands()[i]; int op = phi->operands()[i];
auto op_range = LiveRangeFor(op); auto op_range = data()->GetOrCreateLiveRangeFor(op);
if (!op_range->HasSpillRange()) continue; if (!op_range->HasSpillRange()) continue;
auto op_spill = op_range->GetSpillRange(); auto op_spill = op_range->GetSpillRange();
if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) { if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) {
...@@ -2771,8 +2810,8 @@ SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data) ...@@ -2771,8 +2810,8 @@ SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data)
void SpillSlotLocator::LocateSpillSlots() { void SpillSlotLocator::LocateSpillSlots() {
auto code = data()->code(); auto code = data()->code();
for (auto range : data()->live_ranges()) { for (TopLevelLiveRange* range : data()->live_ranges()) {
if (range == nullptr || range->IsEmpty() || range->IsChild()) continue; if (range == nullptr || range->IsEmpty()) continue;
// We care only about ranges which spill in the frame. // We care only about ranges which spill in the frame.
if (!range->HasSpillRange()) continue; if (!range->HasSpillRange()) continue;
auto spills = range->spills_at_definition(); auto spills = range->spills_at_definition();
...@@ -2814,20 +2853,25 @@ void OperandAssigner::AssignSpillSlots() { ...@@ -2814,20 +2853,25 @@ void OperandAssigner::AssignSpillSlots() {
void OperandAssigner::CommitAssignment() { void OperandAssigner::CommitAssignment() {
for (auto range : data()->live_ranges()) { for (TopLevelLiveRange* top_range : data()->live_ranges()) {
if (range == nullptr || range->IsEmpty()) continue; if (top_range == nullptr || top_range->IsEmpty()) continue;
InstructionOperand spill_operand; InstructionOperand spill_operand;
if (range->TopLevel()->HasSpillOperand()) { if (top_range->HasSpillOperand()) {
spill_operand = *range->TopLevel()->GetSpillOperand(); spill_operand = *top_range->TopLevel()->GetSpillOperand();
} else if (range->TopLevel()->HasSpillRange()) { } else if (top_range->TopLevel()->HasSpillRange()) {
spill_operand = range->TopLevel()->GetSpillRangeOperand(); spill_operand = top_range->TopLevel()->GetSpillRangeOperand();
}
if (top_range->is_phi()) {
data()->GetPhiMapValueFor(top_range)->CommitAssignment(
top_range->GetAssignedOperand());
} }
for (LiveRange* range = top_range; range != nullptr;
range = range->next()) {
auto assigned = range->GetAssignedOperand(); auto assigned = range->GetAssignedOperand();
range->ConvertUsesToOperand(assigned, spill_operand); range->ConvertUsesToOperand(assigned, spill_operand);
if (range->is_phi()) {
data()->GetPhiMapValueFor(range->id())->CommitAssignment(assigned);
} }
if (!range->IsChild() && !spill_operand.IsInvalid()) {
if (!spill_operand.IsInvalid()) {
// If this top level range has a child spilled in a deferred block, we use // If this top level range has a child spilled in a deferred block, we use
// the range and control flow connection mechanism instead of spilling at // the range and control flow connection mechanism instead of spilling at
// definition. Refer to the ConnectLiveRanges and ResolveControlFlow // definition. Refer to the ConnectLiveRanges and ResolveControlFlow
...@@ -2839,13 +2883,13 @@ void OperandAssigner::CommitAssignment() { ...@@ -2839,13 +2883,13 @@ void OperandAssigner::CommitAssignment() {
// moves between ranges. Because of how the ranges are split around // moves between ranges. Because of how the ranges are split around
// deferred blocks, this amounts to spilling and filling inside such // deferred blocks, this amounts to spilling and filling inside such
// blocks. // blocks.
if (!range->TryCommitSpillInDeferredBlock(data()->code(), if (!top_range->TryCommitSpillInDeferredBlock(data()->code(),
spill_operand)) { spill_operand)) {
// Spill at definition if the range isn't spilled only in deferred // Spill at definition if the range isn't spilled only in deferred
// blocks. // blocks.
range->CommitSpillsAtDefinition( top_range->CommitSpillsAtDefinition(
data()->code(), spill_operand, data()->code(), spill_operand,
range->has_slot_use() || range->spilled()); top_range->has_slot_use() || top_range->spilled());
} }
} }
} }
...@@ -2878,19 +2922,17 @@ void ReferenceMapPopulator::PopulateReferenceMaps() { ...@@ -2878,19 +2922,17 @@ void ReferenceMapPopulator::PopulateReferenceMaps() {
int last_range_start = 0; int last_range_start = 0;
auto reference_maps = data()->code()->reference_maps(); auto reference_maps = data()->code()->reference_maps();
ReferenceMapDeque::const_iterator first_it = reference_maps->begin(); ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
for (LiveRange* range : data()->live_ranges()) { for (TopLevelLiveRange* range : data()->live_ranges()) {
if (range == nullptr) continue; if (range == nullptr) continue;
// Iterate over the first parts of multi-part live ranges.
if (range->IsChild()) continue;
// Skip non-reference values. // Skip non-reference values.
if (!data()->IsReference(range->id())) continue; if (!data()->IsReference(range)) continue;
// Skip empty live ranges. // Skip empty live ranges.
if (range->IsEmpty()) continue; if (range->IsEmpty()) continue;
// Find the extent of the range and its children. // Find the extent of the range and its children.
int start = range->Start().ToInstructionIndex(); int start = range->Start().ToInstructionIndex();
int end = 0; int end = 0;
for (auto cur = range; cur != nullptr; cur = cur->next()) { for (LiveRange* cur = range; cur != nullptr; cur = cur->next()) {
auto this_end = cur->End(); auto this_end = cur->End();
if (this_end.ToInstructionIndex() > end) if (this_end.ToInstructionIndex() > end)
end = this_end.ToInstructionIndex(); end = this_end.ToInstructionIndex();
...@@ -2935,7 +2977,7 @@ void ReferenceMapPopulator::PopulateReferenceMaps() { ...@@ -2935,7 +2977,7 @@ void ReferenceMapPopulator::PopulateReferenceMaps() {
// safe point position. // safe point position.
auto safe_point_pos = auto safe_point_pos =
LifetimePosition::InstructionFromInstructionIndex(safe_point); LifetimePosition::InstructionFromInstructionIndex(safe_point);
auto cur = range; LiveRange* cur = range;
while (cur != nullptr && !cur->Covers(safe_point_pos)) { while (cur != nullptr && !cur->Covers(safe_point_pos)) {
cur = cur->next(); cur = cur->next();
} }
...@@ -2949,15 +2991,16 @@ void ReferenceMapPopulator::PopulateReferenceMaps() { ...@@ -2949,15 +2991,16 @@ void ReferenceMapPopulator::PopulateReferenceMaps() {
if (!spill_operand.IsInvalid() && safe_point >= spill_index) { if (!spill_operand.IsInvalid() && safe_point >= spill_index) {
TRACE("Pointer for range %d (spilled at %d) at safe point %d\n", TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
range->id(), spill_index, safe_point); range->vreg(), spill_index, safe_point);
map->RecordReference(AllocatedOperand::cast(spill_operand)); map->RecordReference(AllocatedOperand::cast(spill_operand));
} }
if (!cur->spilled()) { if (!cur->spilled()) {
TRACE( TRACE(
"Pointer in register for range %d (start at %d) " "Pointer in register for range %d:%d (start at %d) "
"at safe point %d\n", "at safe point %d\n",
cur->id(), cur->Start().value(), safe_point); range->vreg(), cur->relative_id(), cur->Start().value(),
safe_point);
auto operand = cur->GetAssignedOperand(); auto operand = cur->GetAssignedOperand();
DCHECK(!operand.IsStackSlot()); DCHECK(!operand.IsStackSlot());
DCHECK_EQ(kRepTagged, AllocatedOperand::cast(operand).machine_type()); DCHECK_EQ(kRepTagged, AllocatedOperand::cast(operand).machine_type());
...@@ -3184,10 +3227,11 @@ void LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block, ...@@ -3184,10 +3227,11 @@ void LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
void LiveRangeConnector::ConnectRanges(Zone* local_zone) { void LiveRangeConnector::ConnectRanges(Zone* local_zone) {
DelayedInsertionMap delayed_insertion_map(local_zone); DelayedInsertionMap delayed_insertion_map(local_zone);
for (auto first_range : data()->live_ranges()) { for (TopLevelLiveRange* top_range : data()->live_ranges()) {
if (first_range == nullptr || first_range->IsChild()) continue; if (top_range == nullptr) continue;
bool connect_spilled = first_range->IsSpilledOnlyInDeferredBlocks(); bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks();
for (auto second_range = first_range->next(); second_range != nullptr; LiveRange* first_range = top_range;
for (LiveRange *second_range = first_range->next(); second_range != nullptr;
first_range = second_range, second_range = second_range->next()) { first_range = second_range, second_range = second_range->next()) {
auto pos = second_range->Start(); auto pos = second_range->Start();
// Add gap move if the two live ranges touch and there is no block // Add gap move if the two live ranges touch and there is no block
......
...@@ -274,27 +274,26 @@ class UsePosition final : public ZoneObject { ...@@ -274,27 +274,26 @@ class UsePosition final : public ZoneObject {
class SpillRange; class SpillRange;
class RegisterAllocationData; class RegisterAllocationData;
class TopLevelLiveRange;
// Representation of SSA values' live ranges as a collection of (continuous) // Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering. // intervals over the instruction ordering.
class LiveRange final : public ZoneObject { class LiveRange : public ZoneObject {
public: public:
explicit LiveRange(int id, MachineType machine_type);
UseInterval* first_interval() const { return first_interval_; } UseInterval* first_interval() const { return first_interval_; }
UsePosition* first_pos() const { return first_pos_; } UsePosition* first_pos() const { return first_pos_; }
LiveRange* parent() const { return parent_; } TopLevelLiveRange* TopLevel() { return top_level_; }
LiveRange* TopLevel() { return (parent_ == nullptr) ? this : parent_; } const TopLevelLiveRange* TopLevel() const { return top_level_; }
const LiveRange* TopLevel() const {
return (parent_ == nullptr) ? this : parent_; bool IsTopLevel() const;
}
LiveRange* next() const { return next_; } LiveRange* next() const { return next_; }
bool IsChild() const { return parent() != nullptr; }
int id() const { return id_; } int relative_id() const { return relative_id_; }
bool IsFixed() const { return id_ < 0; }
bool IsEmpty() const { return first_interval() == nullptr; } bool IsEmpty() const { return first_interval() == nullptr; }
InstructionOperand GetAssignedOperand() const; InstructionOperand GetAssignedOperand() const;
int spill_start_index() const { return spill_start_index_; }
MachineType machine_type() const { return MachineTypeField::decode(bits_); } MachineType machine_type() const { return MachineTypeField::decode(bits_); }
...@@ -310,22 +309,6 @@ class LiveRange final : public ZoneObject { ...@@ -310,22 +309,6 @@ class LiveRange final : public ZoneObject {
RegisterKind kind() const; RegisterKind kind() const;
// Correct only for parent.
bool is_phi() const { return IsPhiField::decode(bits_); }
void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }
// Correct only for parent.
bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
void set_is_non_loop_phi(bool value) {
bits_ = IsNonLoopPhiField::update(bits_, value);
}
// Relevant only for parent.
bool has_slot_use() const { return HasSlotUseField::decode(bits_); }
void set_has_slot_use(bool value) {
bits_ = HasSlotUseField::update(bits_, value);
}
// Returns use position in this live range that follows both start // Returns use position in this live range that follows both start
// and last processed use position. // and last processed use position.
UsePosition* NextUsePosition(LifetimePosition start) const; UsePosition* NextUsePosition(LifetimePosition start) const;
...@@ -350,14 +333,18 @@ class LiveRange final : public ZoneObject { ...@@ -350,14 +333,18 @@ class LiveRange final : public ZoneObject {
// Can this live range be spilled at this position. // Can this live range be spilled at this position.
bool CanBeSpilled(LifetimePosition pos) const; bool CanBeSpilled(LifetimePosition pos) const;
// Split this live range at the given position which must follow the start of // Splitting primitive used by both splitting and splintering members.
// the range. // Performs the split, but does not link the resulting ranges.
// The given position must follow the start of the range.
// All uses following the given position will be moved from this // All uses following the given position will be moved from this
// live range to the result live range. // live range to the result live range.
void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone); // The current range will terminate at position, while result will start from
void Splinter(LifetimePosition start, LifetimePosition end, LiveRange* result, // position.
Zone* zone); void DetachAt(LifetimePosition position, LiveRange* result, Zone* zone);
void Merge(LiveRange* other, RegisterAllocationData* data);
// Detaches at position, and then links the resulting ranges. Returns the
// child, which starts at position.
LiveRange* SplitAt(LifetimePosition position, Zone* zone);
// Returns nullptr when no register is hinted, otherwise sets register_index. // Returns nullptr when no register is hinted, otherwise sets register_index.
UsePosition* FirstHintPosition(int* register_index) const; UsePosition* FirstHintPosition(int* register_index) const;
...@@ -381,7 +368,117 @@ class LiveRange final : public ZoneObject { ...@@ -381,7 +368,117 @@ class LiveRange final : public ZoneObject {
return last_interval_->end(); return last_interval_->end();
} }
bool ShouldBeAllocatedBefore(const LiveRange* other) const;
bool CanCover(LifetimePosition position) const;
bool Covers(LifetimePosition position) const;
LifetimePosition FirstIntersection(LiveRange* other) const;
void Verify() const;
void ConvertUsesToOperand(const InstructionOperand& op,
const InstructionOperand& spill_op);
void SetUseHints(int register_index);
void UnsetUseHints() { SetUseHints(kUnassignedRegister); }
// Used solely by the Greedy Allocator:
unsigned GetSize();
float weight() const { return weight_; }
void set_weight(float weight) { weight_ = weight; }
static const int kInvalidSize = -1;
static const float kInvalidWeight;
static const float kMaxWeight;
private:
friend class TopLevelLiveRange;
explicit LiveRange(int relative_id, MachineType machine_type,
TopLevelLiveRange* top_level);
void AppendAsChild(TopLevelLiveRange* other);
void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);
void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }
UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
void AdvanceLastProcessedMarker(UseInterval* to_start_of,
LifetimePosition but_not_past) const;
typedef BitField<bool, 0, 1> SpilledField;
typedef BitField<int32_t, 6, 6> AssignedRegisterField;
typedef BitField<MachineType, 12, 15> MachineTypeField;
int relative_id_;
uint32_t bits_;
UseInterval* last_interval_;
UseInterval* first_interval_;
UsePosition* first_pos_;
TopLevelLiveRange* top_level_;
LiveRange* next_;
// This is used as a cache, it doesn't affect correctness.
mutable UseInterval* current_interval_;
// This is used as a cache, it doesn't affect correctness.
mutable UsePosition* last_processed_use_;
// This is used as a cache, it's invalid outside of BuildLiveRanges.
mutable UsePosition* current_hint_position_;
// greedy: the number of LifetimePositions covered by this range. Used to
// prioritize selecting live ranges for register assignment, as well as
// in weight calculations.
int size_;
// greedy: a metric for resolving conflicts between ranges with an assigned
// register and ranges that intersect them and need a register.
float weight_;
DISALLOW_COPY_AND_ASSIGN(LiveRange);
};
class TopLevelLiveRange final : public LiveRange {
public:
explicit TopLevelLiveRange(int vreg, MachineType machine_type);
int spill_start_index() const { return spill_start_index_; }
bool IsFixed() const { return vreg_ < 0; }
bool is_phi() const { return IsPhiField::decode(bits_); }
void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }
bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
void set_is_non_loop_phi(bool value) {
bits_ = IsNonLoopPhiField::update(bits_, value);
}
bool has_slot_use() const { return HasSlotUseField::decode(bits_); }
void set_has_slot_use(bool value) {
bits_ = HasSlotUseField::update(bits_, value);
}
// Add a new interval or a new use position to this live range.
void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
void AddUsePosition(UsePosition* pos);
// Shorten the most recently added interval by setting a new start.
void ShortenTo(LifetimePosition start);
// Detaches between start and end, and attributes the resulting range to
// result.
// The current range is pointed to as "splintered_from". No parent/child
// relationship is established between this and result.
void Splinter(LifetimePosition start, LifetimePosition end,
TopLevelLiveRange* result, Zone* zone);
// Assuming other was splintered from this range, embeds other and its
// children as part of the children sequence of this range.
void Merge(TopLevelLiveRange* other, RegisterAllocationData* data);
// Spill range management.
void SetSpillRange(SpillRange* spill_range);
enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange }; enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
void set_spill_type(SpillType value) {
bits_ = SpillTypeField::update(bits_, value);
}
SpillType spill_type() const { return SpillTypeField::decode(bits_); } SpillType spill_type() const { return SpillTypeField::decode(bits_); }
InstructionOperand* GetSpillOperand() const { InstructionOperand* GetSpillOperand() const {
DCHECK(spill_type() == SpillType::kSpillOperand); DCHECK(spill_type() == SpillType::kSpillOperand);
...@@ -404,21 +501,21 @@ class LiveRange final : public ZoneObject { ...@@ -404,21 +501,21 @@ class LiveRange final : public ZoneObject {
return spill_type() == SpillType::kSpillOperand; return spill_type() == SpillType::kSpillOperand;
} }
bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; } bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; }
bool MayRequireSpillRange() const {
DCHECK(!IsChild() && !IsSplinter());
return !HasSpillOperand() && spill_range_ == nullptr;
}
AllocatedOperand GetSpillRangeOperand() const; AllocatedOperand GetSpillRangeOperand() const;
void SpillAtDefinition(Zone* zone, int gap_index, void SpillAtDefinition(Zone* zone, int gap_index,
InstructionOperand* operand); InstructionOperand* operand);
void SetSpillOperand(InstructionOperand* operand); void SetSpillOperand(InstructionOperand* operand);
void SetSpillRange(SpillRange* spill_range); void SetSpillStartIndex(int start) {
spill_start_index_ = Min(start, spill_start_index_);
}
void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);
void CommitSpillsAtDefinition(InstructionSequence* sequence, void CommitSpillsAtDefinition(InstructionSequence* sequence,
const InstructionOperand& operand, const InstructionOperand& operand,
bool might_be_duplicated); bool might_be_duplicated);
// This must be applied on top level ranges.
// If all the children of this range are spilled in deferred blocks, and if // If all the children of this range are spilled in deferred blocks, and if
// for any non-spilled child with a use position requiring a slot, that range // for any non-spilled child with a use position requiring a slot, that range
// is contained in a deferred block, mark the range as // is contained in a deferred block, mark the range as
...@@ -429,29 +526,19 @@ class LiveRange final : public ZoneObject { ...@@ -429,29 +526,19 @@ class LiveRange final : public ZoneObject {
bool TryCommitSpillInDeferredBlock(InstructionSequence* code, bool TryCommitSpillInDeferredBlock(InstructionSequence* code,
const InstructionOperand& spill_operand); const InstructionOperand& spill_operand);
void SetSpillStartIndex(int start) { TopLevelLiveRange* splintered_from() const { return splintered_from_; }
spill_start_index_ = Min(start, spill_start_index_); bool IsSplinter() const { return splintered_from_ != nullptr; }
bool MayRequireSpillRange() const {
DCHECK(!IsSplinter());
return !HasSpillOperand() && spill_range_ == nullptr;
} }
void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
int vreg() const { return vreg_; }
bool ShouldBeAllocatedBefore(const LiveRange* other) const; int GetNextChildId() { return ++last_child_id_; }
bool CanCover(LifetimePosition position) const; bool IsSpilledOnlyInDeferredBlocks() const {
bool Covers(LifetimePosition position) const; return spilled_in_deferred_blocks_;
LifetimePosition FirstIntersection(LiveRange* other) const; }
// Add a new interval or a new use position to this live range.
void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
void AddUsePosition(UsePosition* pos);
// Shorten the most recently added interval by setting a new start.
void ShortenTo(LifetimePosition start);
void Verify() const;
void ConvertUsesToOperand(const InstructionOperand& op,
const InstructionOperand& spill_op);
void SetUseHints(int register_index);
void UnsetUseHints() { SetUseHints(kUnassignedRegister); }
struct SpillAtDefinitionList; struct SpillAtDefinitionList;
...@@ -459,91 +546,29 @@ class LiveRange final : public ZoneObject { ...@@ -459,91 +546,29 @@ class LiveRange final : public ZoneObject {
return spills_at_definition_; return spills_at_definition_;
} }
// Used solely by the Greedy Allocator:
unsigned GetSize();
float weight() const { return weight_; }
void set_weight(float weight) { weight_ = weight; }
bool IsSpilledOnlyInDeferredBlocks() const {
return spilled_in_deferred_block_;
}
static const int kInvalidSize = -1;
static const float kInvalidWeight;
static const float kMaxWeight;
LiveRange* splintered_from() const {
DCHECK(!IsChild());
return splintered_from_;
}
bool IsSplinter() const {
DCHECK(!IsChild());
return splintered_from_ != nullptr;
}
void set_spill_type(SpillType value) {
bits_ = SpillTypeField::update(bits_, value);
}
private: private:
void AppendChild(LiveRange* other);
void UpdateParentForAllChildren(LiveRange* new_parent);
void UpdateSpillRangePostMerge(LiveRange* merged);
void SetSplinteredFrom(LiveRange* splinter_parent);
void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }
UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
void AdvanceLastProcessedMarker(UseInterval* to_start_of,
LifetimePosition but_not_past) const;
LiveRange* GetLastChild();
typedef BitField<bool, 0, 1> SpilledField;
typedef BitField<bool, 1, 1> HasSlotUseField; typedef BitField<bool, 1, 1> HasSlotUseField;
typedef BitField<bool, 2, 1> IsPhiField; typedef BitField<bool, 2, 1> IsPhiField;
typedef BitField<bool, 3, 1> IsNonLoopPhiField; typedef BitField<bool, 3, 1> IsNonLoopPhiField;
typedef BitField<SpillType, 4, 2> SpillTypeField; typedef BitField<SpillType, 4, 2> SpillTypeField;
typedef BitField<int32_t, 6, 6> AssignedRegisterField;
typedef BitField<MachineType, 12, 15> MachineTypeField;
int id_; LiveRange* GetLastChild();
int spill_start_index_;
uint32_t bits_; int vreg_;
UseInterval* last_interval_; int last_child_id_;
UseInterval* first_interval_; TopLevelLiveRange* splintered_from_;
UsePosition* first_pos_;
LiveRange* parent_;
LiveRange* next_;
LiveRange* splintered_from_;
union { union {
// Correct value determined by spill_type() // Correct value determined by spill_type()
InstructionOperand* spill_operand_; InstructionOperand* spill_operand_;
SpillRange* spill_range_; SpillRange* spill_range_;
}; };
SpillAtDefinitionList* spills_at_definition_; SpillAtDefinitionList* spills_at_definition_;
// This is used as a cache, it doesn't affect correctness.
mutable UseInterval* current_interval_;
// This is used as a cache, it doesn't affect correctness.
mutable UsePosition* last_processed_use_;
// This is used as a cache, it's invalid outside of BuildLiveRanges.
mutable UsePosition* current_hint_position_;
// greedy: the number of LifetimePositions covered by this range. Used to
// prioritize selecting live ranges for register assignment, as well as
// in weight calculations.
int size_;
// greedy: a metric for resolving conflicts between ranges with an assigned
// register and ranges that intersect them and need a register.
float weight_;
// TODO(mtrofin): generalize spilling after definition, currently specialized // TODO(mtrofin): generalize spilling after definition, currently specialized
// just for spill in a single deferred block. // just for spill in a single deferred block.
bool spilled_in_deferred_block_; bool spilled_in_deferred_blocks_;
DISALLOW_COPY_AND_ASSIGN(LiveRange); int spill_start_index_;
DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
}; };
...@@ -560,7 +585,7 @@ std::ostream& operator<<(std::ostream& os, ...@@ -560,7 +585,7 @@ std::ostream& operator<<(std::ostream& os,
class SpillRange final : public ZoneObject { class SpillRange final : public ZoneObject {
public: public:
static const int kUnassignedSlot = -1; static const int kUnassignedSlot = -1;
SpillRange(LiveRange* range, Zone* zone); SpillRange(TopLevelLiveRange* range, Zone* zone);
UseInterval* interval() const { return use_interval_; } UseInterval* interval() const { return use_interval_; }
// Currently, only 4 or 8 byte slots are supported. // Currently, only 4 or 8 byte slots are supported.
...@@ -576,8 +601,10 @@ class SpillRange final : public ZoneObject { ...@@ -576,8 +601,10 @@ class SpillRange final : public ZoneObject {
DCHECK_NE(kUnassignedSlot, assigned_slot_); DCHECK_NE(kUnassignedSlot, assigned_slot_);
return assigned_slot_; return assigned_slot_;
} }
const ZoneVector<LiveRange*>& live_ranges() const { return live_ranges_; } const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; } return live_ranges_;
}
ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
int byte_width() const { return byte_width_; } int byte_width() const { return byte_width_; }
RegisterKind kind() const { return kind_; } RegisterKind kind() const { return kind_; }
...@@ -587,7 +614,7 @@ class SpillRange final : public ZoneObject { ...@@ -587,7 +614,7 @@ class SpillRange final : public ZoneObject {
// Merge intervals, making sure the use intervals are sorted // Merge intervals, making sure the use intervals are sorted
void MergeDisjointIntervals(UseInterval* other); void MergeDisjointIntervals(UseInterval* other);
ZoneVector<LiveRange*> live_ranges_; ZoneVector<TopLevelLiveRange*> live_ranges_;
UseInterval* use_interval_; UseInterval* use_interval_;
LifetimePosition end_position_; LifetimePosition end_position_;
int assigned_slot_; int assigned_slot_;
...@@ -637,16 +664,20 @@ class RegisterAllocationData final : public ZoneObject { ...@@ -637,16 +664,20 @@ class RegisterAllocationData final : public ZoneObject {
InstructionSequence* code, InstructionSequence* code,
const char* debug_name = nullptr); const char* debug_name = nullptr);
const ZoneVector<LiveRange*>& live_ranges() const { return live_ranges_; } const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; } return live_ranges_;
const ZoneVector<LiveRange*>& fixed_live_ranges() const { }
ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
return fixed_live_ranges_; return fixed_live_ranges_;
} }
ZoneVector<LiveRange*>& fixed_live_ranges() { return fixed_live_ranges_; } ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
ZoneVector<LiveRange*>& fixed_double_live_ranges() { return fixed_live_ranges_;
}
ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
return fixed_double_live_ranges_; return fixed_double_live_ranges_;
} }
const ZoneVector<LiveRange*>& fixed_double_live_ranges() const { const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
return fixed_double_live_ranges_; return fixed_double_live_ranges_;
} }
ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; } ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
...@@ -665,21 +696,20 @@ class RegisterAllocationData final : public ZoneObject { ...@@ -665,21 +696,20 @@ class RegisterAllocationData final : public ZoneObject {
MachineType MachineTypeFor(int virtual_register); MachineType MachineTypeFor(int virtual_register);
LiveRange* LiveRangeFor(int index); TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
// Creates a new live range. // Creates a new live range.
LiveRange* NewLiveRange(int index, MachineType machine_type); TopLevelLiveRange* NewLiveRange(int index, MachineType machine_type);
LiveRange* NextLiveRange(MachineType machine_type); TopLevelLiveRange* NextLiveRange(MachineType machine_type);
LiveRange* NewChildRangeFor(LiveRange* range);
SpillRange* AssignSpillRangeToLiveRange(LiveRange* range); SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range);
SpillRange* CreateSpillRangeForLiveRange(LiveRange* range); SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);
MoveOperands* AddGapMove(int index, Instruction::GapPosition position, MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
const InstructionOperand& from, const InstructionOperand& from,
const InstructionOperand& to); const InstructionOperand& to);
bool IsReference(int virtual_register) const { bool IsReference(TopLevelLiveRange* top_range) const {
return code()->IsReference(virtual_register); return code()->IsReference(top_range->vreg());
} }
bool ExistsUseWithoutDefinition(); bool ExistsUseWithoutDefinition();
...@@ -688,6 +718,7 @@ class RegisterAllocationData final : public ZoneObject { ...@@ -688,6 +718,7 @@ class RegisterAllocationData final : public ZoneObject {
PhiMapValue* InitializePhiMap(const InstructionBlock* block, PhiMapValue* InitializePhiMap(const InstructionBlock* block,
PhiInstruction* phi); PhiInstruction* phi);
PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
PhiMapValue* GetPhiMapValueFor(int virtual_register); PhiMapValue* GetPhiMapValueFor(int virtual_register);
bool IsBlockBoundary(LifetimePosition pos) const; bool IsBlockBoundary(LifetimePosition pos) const;
...@@ -699,6 +730,8 @@ class RegisterAllocationData final : public ZoneObject { ...@@ -699,6 +730,8 @@ class RegisterAllocationData final : public ZoneObject {
void Print(const SpillRange* spill_range); void Print(const SpillRange* spill_range);
private: private:
int GetNextLiveRangeId();
Zone* const allocation_zone_; Zone* const allocation_zone_;
Frame* const frame_; Frame* const frame_;
InstructionSequence* const code_; InstructionSequence* const code_;
...@@ -706,9 +739,9 @@ class RegisterAllocationData final : public ZoneObject { ...@@ -706,9 +739,9 @@ class RegisterAllocationData final : public ZoneObject {
const RegisterConfiguration* const config_; const RegisterConfiguration* const config_;
PhiMap phi_map_; PhiMap phi_map_;
ZoneVector<BitVector*> live_in_sets_; ZoneVector<BitVector*> live_in_sets_;
ZoneVector<LiveRange*> live_ranges_; ZoneVector<TopLevelLiveRange*> live_ranges_;
ZoneVector<LiveRange*> fixed_live_ranges_; ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
ZoneVector<LiveRange*> fixed_double_live_ranges_; ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
ZoneSet<SpillRange*> spill_ranges_; ZoneSet<SpillRange*> spill_ranges_;
DelayedReferences delayed_references_; DelayedReferences delayed_references_;
BitVector* assigned_registers_; BitVector* assigned_registers_;
...@@ -735,12 +768,6 @@ class ConstraintBuilder final : public ZoneObject { ...@@ -735,12 +768,6 @@ class ConstraintBuilder final : public ZoneObject {
InstructionSequence* code() const { return data()->code(); } InstructionSequence* code() const { return data()->code(); }
Zone* allocation_zone() const { return data()->allocation_zone(); } Zone* allocation_zone() const { return data()->allocation_zone(); }
Instruction* InstructionAt(int index) { return code()->InstructionAt(index); }
bool IsReference(int virtual_register) const {
return data()->IsReference(virtual_register);
}
LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); }
InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos, InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
bool is_tagged); bool is_tagged);
void MeetRegisterConstraints(const InstructionBlock* block); void MeetRegisterConstraints(const InstructionBlock* block);
...@@ -775,8 +802,6 @@ class LiveRangeBuilder final : public ZoneObject { ...@@ -775,8 +802,6 @@ class LiveRangeBuilder final : public ZoneObject {
return data()->live_in_sets(); return data()->live_in_sets();
} }
LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); }
void Verify() const; void Verify() const;
// Liveness analysis support. // Liveness analysis support.
...@@ -787,8 +812,8 @@ class LiveRangeBuilder final : public ZoneObject { ...@@ -787,8 +812,8 @@ class LiveRangeBuilder final : public ZoneObject {
static int FixedLiveRangeID(int index) { return -index - 1; } static int FixedLiveRangeID(int index) { return -index - 1; }
int FixedDoubleLiveRangeID(int index); int FixedDoubleLiveRangeID(int index);
LiveRange* FixedLiveRangeFor(int index); TopLevelLiveRange* FixedLiveRangeFor(int index);
LiveRange* FixedDoubleLiveRangeFor(int index); TopLevelLiveRange* FixedDoubleLiveRangeFor(int index);
void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos); void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos); void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);
...@@ -798,7 +823,7 @@ class LiveRangeBuilder final : public ZoneObject { ...@@ -798,7 +823,7 @@ class LiveRangeBuilder final : public ZoneObject {
UsePosition* NewUsePosition(LifetimePosition pos) { UsePosition* NewUsePosition(LifetimePosition pos) {
return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone); return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
} }
LiveRange* LiveRangeFor(InstructionOperand* operand); TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand);
// Helper methods for building intervals. // Helper methods for building intervals.
UsePosition* Define(LifetimePosition position, InstructionOperand* operand, UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
void* hint, UsePositionHintType hint_type); void* hint, UsePositionHintType hint_type);
...@@ -832,8 +857,6 @@ class RegisterAllocator : public ZoneObject { ...@@ -832,8 +857,6 @@ class RegisterAllocator : public ZoneObject {
Zone* allocation_zone() const { return data()->allocation_zone(); } Zone* allocation_zone() const { return data()->allocation_zone(); }
LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); }
// Split the given range at the given position. // Split the given range at the given position.
// If range starts at or after the given position then the // If range starts at or after the given position then the
// original range is returned. // original range is returned.
...@@ -859,7 +882,7 @@ class RegisterAllocator : public ZoneObject { ...@@ -859,7 +882,7 @@ class RegisterAllocator : public ZoneObject {
LifetimePosition FindOptimalSpillingPos(LiveRange* range, LifetimePosition FindOptimalSpillingPos(LiveRange* range,
LifetimePosition pos); LifetimePosition pos);
const ZoneVector<LiveRange*>& GetFixedRegisters() const; const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
const char* RegisterName(int allocation_index) const; const char* RegisterName(int allocation_index) const;
private: private:
...@@ -903,7 +926,7 @@ class LinearScanAllocator final : public RegisterAllocator { ...@@ -903,7 +926,7 @@ class LinearScanAllocator final : public RegisterAllocator {
void InactiveToActive(LiveRange* range); void InactiveToActive(LiveRange* range);
// Helper methods for allocating registers. // Helper methods for allocating registers.
bool TryReuseSpillForPhi(LiveRange* range); bool TryReuseSpillForPhi(TopLevelLiveRange* range);
bool TryAllocateFreeReg(LiveRange* range); bool TryAllocateFreeReg(LiveRange* range);
void AllocateBlockedReg(LiveRange* range); void AllocateBlockedReg(LiveRange* range);
......
...@@ -29,7 +29,8 @@ class TestRangeBuilder { ...@@ -29,7 +29,8 @@ class TestRangeBuilder {
LiveRange* Build(int start, int end) { return Add(start, end).Build(); } LiveRange* Build(int start, int end) { return Add(start, end).Build(); }
LiveRange* Build() { LiveRange* Build() {
LiveRange* range = new (zone_) LiveRange(id_, MachineType::kRepTagged); TopLevelLiveRange* range =
new (zone_) TopLevelLiveRange(id_, MachineType::kRepTagged);
// Traverse the provided interval specifications backwards, because that is // Traverse the provided interval specifications backwards, because that is
// what LiveRange expects. // what LiveRange expects.
for (int i = static_cast<int>(pairs_.size()) - 1; i >= 0; --i) { for (int i = static_cast<int>(pairs_.size()) - 1; i >= 0; --i) {
...@@ -100,8 +101,9 @@ void CoalescedLiveRangesTest::RemoveConflicts(LiveRange* range) { ...@@ -100,8 +101,9 @@ void CoalescedLiveRangesTest::RemoveConflicts(LiveRange* range) {
LiveRangeIDs seen(zone()); LiveRangeIDs seen(zone());
for (auto c = conflicts.Current(); c != nullptr; for (auto c = conflicts.Current(); c != nullptr;
c = conflicts.RemoveCurrentAndGetNext()) { c = conflicts.RemoveCurrentAndGetNext()) {
EXPECT_FALSE(seen.count(c->id()) > 0); int id = c->TopLevel()->vreg();
seen.insert(c->id()); EXPECT_FALSE(seen.count(id) > 0);
seen.insert(c->TopLevel()->vreg());
} }
} }
...@@ -118,7 +120,7 @@ bool CoalescedLiveRangesTest::IsRangeConflictingWith(const LiveRange* range, ...@@ -118,7 +120,7 @@ bool CoalescedLiveRangesTest::IsRangeConflictingWith(const LiveRange* range,
auto conflicts = ranges().GetConflicts(range); auto conflicts = ranges().GetConflicts(range);
for (auto conflict = conflicts.Current(); conflict != nullptr; for (auto conflict = conflicts.Current(); conflict != nullptr;
conflict = conflicts.GetNext()) { conflict = conflicts.GetNext()) {
found_ids.insert(conflict->id()); found_ids.insert(conflict->TopLevel()->vreg());
} }
return found_ids == ids; return found_ids == ids;
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment