Commit 9d51f8f5 authored by dcarney, committed by Commit bot

Revert of revert r25736 (patchset #2 id:20001 of https://codereview.chromium.org/803493002/)

Reason for revert:
Performance bots were unchanged by the original revert.

Original issue's description:
> revert r25736
>
> R=bmeurer@chromium.org
>
> BUG=

TBR=bmeurer@chromium.org
NOTREECHECKS=true
NOTRY=true
BUG=

Review URL: https://codereview.chromium.org/787183003

Cr-Commit-Position: refs/heads/master@{#25801}
parent 22302b51
......@@ -725,7 +725,7 @@ void GraphC1Visualizer::PrintLiveRange(LiveRange* range, const char* type) {
}
} else if (range->IsSpilled()) {
int index = -1;
if (range->TopLevel()->GetSpillRange() != nullptr) {
if (range->TopLevel()->HasSpillRange()) {
index = kMaxInt; // This hasn't been set yet.
} else {
index = range->TopLevel()->GetSpillOperand()->index();
......
......@@ -15,8 +15,6 @@ std::ostream& operator<<(std::ostream& os,
const InstructionOperand& op = *printable.op_;
const RegisterConfiguration* conf = printable.register_configuration_;
switch (op.kind()) {
case InstructionOperand::INVALID:
return os << "(0)";
case InstructionOperand::UNALLOCATED: {
const UnallocatedOperand* unalloc = UnallocatedOperand::cast(&op);
os << "v" << unalloc->virtual_register();
......
......@@ -39,7 +39,6 @@ const InstructionCode kSourcePositionInstruction = -3;
class InstructionOperand : public ZoneObject {
public:
enum Kind {
INVALID,
UNALLOCATED,
CONSTANT,
IMMEDIATE,
......@@ -49,7 +48,6 @@ class InstructionOperand : public ZoneObject {
DOUBLE_REGISTER
};
InstructionOperand() : value_(KindField::encode(INVALID)) {}
InstructionOperand(Kind kind, int index) { ConvertTo(kind, index); }
Kind kind() const { return KindField::decode(value_); }
......@@ -58,7 +56,6 @@ class InstructionOperand : public ZoneObject {
bool Is##name() const { return kind() == type; }
INSTRUCTION_OPERAND_LIST(INSTRUCTION_OPERAND_PREDICATE)
INSTRUCTION_OPERAND_PREDICATE(Unallocated, UNALLOCATED, 0)
INSTRUCTION_OPERAND_PREDICATE(Ignored, INVALID, 0)
#undef INSTRUCTION_OPERAND_PREDICATE
bool Equals(const InstructionOperand* other) const {
return value_ == other->value_;
......@@ -291,16 +288,12 @@ class MoveOperands FINAL {
}
// A move is redundant if it's been eliminated, if its source and
// destination are the same, or if its destination is unneeded or constant.
// destination are the same, or if its destination is constant.
bool IsRedundant() const {
return IsEliminated() || source_->Equals(destination_) || IsIgnored() ||
return IsEliminated() || source_->Equals(destination_) ||
(destination_ != NULL && destination_->IsConstant());
}
bool IsIgnored() const {
return destination_ != NULL && destination_->IsIgnored();
}
// We clear both operands to indicate move that's been eliminated.
void Eliminate() { source_ = destination_ = NULL; }
bool IsEliminated() const {
......@@ -348,7 +341,7 @@ class SubKindOperand FINAL : public InstructionOperand {
private:
static SubKindOperand* cache;
SubKindOperand() : InstructionOperand() {}
SubKindOperand() : InstructionOperand(kOperandKind, 0) {} // For the caches.
explicit SubKindOperand(int index)
: InstructionOperand(kOperandKind, index) {}
};
......
......@@ -585,6 +585,15 @@ struct ReuseSpillSlotsPhase {
};
struct CommitAssignmentPhase {
static const char* phase_name() { return "commit assignment"; }
void Run(PipelineData* data, Zone* temp_zone) {
data->register_allocator()->CommitAssignment();
}
};
struct PopulatePointerMapsPhase {
static const char* phase_name() { return "populate pointer maps"; }
......@@ -1036,6 +1045,7 @@ void Pipeline::AllocateRegisters(const RegisterConfiguration* config,
if (FLAG_turbo_reuse_spill_slots) {
Run<ReuseSpillSlotsPhase>();
}
Run<CommitAssignmentPhase>();
Run<PopulatePointerMapsPhase>();
Run<ConnectRangesPhase>();
Run<ResolveControlFlowPhase>();
......
......@@ -276,10 +276,8 @@ class RegisterAllocatorVerifier::OutgoingMapping : public ZoneObject {
auto* moves = move->move_operands();
for (auto i = moves->begin(); i != moves->end(); ++i) {
if (i->IsEliminated()) continue;
CHECK(i->source()->kind() != InstructionOperand::INVALID);
auto cur = locations()->find(i->source());
CHECK(cur != locations()->end());
if (i->destination()->kind() == InstructionOperand::INVALID) continue;
to_insert.insert(std::make_pair(i->destination(), cur->second));
}
// Drop current mappings.
......
......@@ -74,6 +74,16 @@ void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
}
struct LiveRange::SpillAtDefinitionList : ZoneObject {
SpillAtDefinitionList(int gap_index, InstructionOperand* operand,
SpillAtDefinitionList* next)
: gap_index(gap_index), operand(operand), next(next) {}
const int gap_index;
InstructionOperand* const operand;
SpillAtDefinitionList* const next;
};
#ifdef DEBUG
......@@ -119,53 +129,72 @@ LiveRange::LiveRange(int id, Zone* zone)
current_interval_(nullptr),
last_processed_use_(nullptr),
current_hint_operand_(nullptr),
spill_operand_(new (zone) InstructionOperand()),
spill_start_index_(kMaxInt),
spill_range_(nullptr) {}
spill_type_(SpillType::kNoSpillType),
spill_operand_(nullptr),
spills_at_definition_(nullptr) {}
void LiveRange::set_assigned_register(int reg, Zone* zone) {
DCHECK(!HasRegisterAssigned() && !IsSpilled());
assigned_register_ = reg;
if (spill_range_ == nullptr) {
ConvertOperands(zone);
}
// TODO(dcarney): stop aliasing hint operands.
ConvertUsesToOperand(CreateAssignedOperand(zone));
}
void LiveRange::MakeSpilled(Zone* zone) {
void LiveRange::MakeSpilled() {
DCHECK(!IsSpilled());
DCHECK(TopLevel()->HasAllocatedSpillOperand());
DCHECK(!TopLevel()->HasNoSpillType());
spilled_ = true;
assigned_register_ = kInvalidAssignment;
ConvertOperands(zone);
}
bool LiveRange::HasAllocatedSpillOperand() const {
DCHECK(spill_operand_ != nullptr);
return !spill_operand_->IsIgnored() || spill_range_ != nullptr;
void LiveRange::SpillAtDefinition(Zone* zone, int gap_index,
InstructionOperand* operand) {
DCHECK(HasNoSpillType());
spills_at_definition_ = new (zone)
SpillAtDefinitionList(gap_index, operand, spills_at_definition_);
}
void LiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence,
InstructionOperand* op) {
auto to_spill = TopLevel()->spills_at_definition_;
if (to_spill == nullptr) return;
auto zone = sequence->zone();
for (; to_spill != nullptr; to_spill = to_spill->next) {
auto gap = sequence->GapAt(to_spill->gap_index);
auto move = gap->GetOrCreateParallelMove(GapInstruction::START, zone);
move->AddMove(to_spill->operand, op, zone);
}
TopLevel()->spills_at_definition_ = nullptr;
}
void LiveRange::SetSpillOperand(InstructionOperand* operand) {
DCHECK(HasNoSpillType());
DCHECK(!operand->IsUnallocated());
DCHECK(spill_operand_ != nullptr);
DCHECK(spill_operand_->IsIgnored());
spill_operand_->ConvertTo(operand->kind(), operand->index());
spill_type_ = SpillType::kSpillOperand;
spill_operand_ = operand;
}
void LiveRange::SetSpillRange(SpillRange* spill_range) {
DCHECK(HasNoSpillType() || HasSpillRange());
DCHECK_NE(spill_range, nullptr);
spill_type_ = SpillType::kSpillRange;
spill_range_ = spill_range;
}
void LiveRange::CommitSpillOperand(InstructionOperand* operand) {
DCHECK(spill_range_ != nullptr);
DCHECK(HasSpillRange());
DCHECK(!operand->IsUnallocated());
DCHECK(!IsChild());
spill_range_ = nullptr;
SetSpillOperand(operand);
for (auto range = this; range != nullptr; range = range->next()) {
if (range->IsSpilled()) {
range->ConvertUsesToOperand(operand);
}
}
spill_type_ = SpillType::kSpillOperand;
spill_operand_ = operand;
}
......@@ -235,15 +264,11 @@ InstructionOperand* LiveRange::CreateAssignedOperand(Zone* zone) const {
default:
UNREACHABLE();
}
} else if (IsSpilled()) {
} else {
DCHECK(IsSpilled());
DCHECK(!HasRegisterAssigned());
op = TopLevel()->GetSpillOperand();
DCHECK(!op->IsUnallocated());
} else {
UnallocatedOperand* unalloc =
new (zone) UnallocatedOperand(UnallocatedOperand::NONE);
unalloc->set_virtual_register(id_);
op = unalloc;
}
return op;
}
......@@ -482,11 +507,6 @@ void LiveRange::ConvertUsesToOperand(InstructionOperand* op) {
}
void LiveRange::ConvertOperands(Zone* zone) {
ConvertUsesToOperand(CreateAssignedOperand(zone));
}
bool LiveRange::CanCover(LifetimePosition position) const {
if (IsEmpty()) return false;
return Start().Value() <= position.Value() &&
......@@ -817,7 +837,7 @@ SpillRange::SpillRange(LiveRange* range, Zone* zone) : live_ranges_(zone) {
use_interval_ = result;
live_ranges().push_back(range);
end_position_ = node->end();
DCHECK(range->GetSpillRange() == nullptr);
DCHECK(!range->HasSpillRange());
range->SetSpillRange(this);
}
......@@ -921,6 +941,20 @@ void RegisterAllocator::ReuseSpillSlots() {
}
void RegisterAllocator::CommitAssignment() {
for (auto range : live_ranges()) {
if (range == nullptr || range->IsEmpty()) continue;
// Register assignments were committed in set_assigned_register.
if (range->HasRegisterAssigned()) continue;
auto assigned = range->CreateAssignedOperand(code_zone());
range->ConvertUsesToOperand(assigned);
if (range->IsSpilled()) {
range->CommitSpillsAtDefinition(code(), assigned);
}
}
}
SpillRange* RegisterAllocator::AssignSpillRangeToLiveRange(LiveRange* range) {
DCHECK(FLAG_turbo_reuse_spill_slots);
auto spill_range = new (local_zone()) SpillRange(range, local_zone());
......@@ -931,7 +965,7 @@ SpillRange* RegisterAllocator::AssignSpillRangeToLiveRange(LiveRange* range) {
bool RegisterAllocator::TryReuseSpillForPhi(LiveRange* range) {
DCHECK(FLAG_turbo_reuse_spill_slots);
DCHECK(!range->HasAllocatedSpillOperand());
DCHECK(range->HasNoSpillType());
if (range->IsChild() || !range->is_phi()) return false;
auto lookup = phi_map_.find(range->id());
......@@ -1075,16 +1109,8 @@ void RegisterAllocator::MeetRegisterConstraintsForLastInstructionInBlock(
const InstructionBlock* successor = code()->InstructionBlockAt(succ);
DCHECK(successor->PredecessorCount() == 1);
int gap_index = successor->first_instruction_index() + 1;
range->SpillAtDefinition(local_zone(), gap_index, output);
range->SetSpillStartIndex(gap_index);
// This move to spill operand is not a real use. Liveness analysis
// and splitting of live ranges do not account for it.
// Thus it should be inserted to a lifetime position corresponding to
// the instruction end.
auto gap = code()->GapAt(gap_index);
auto move =
gap->GetOrCreateParallelMove(GapInstruction::BEFORE, code_zone());
move->AddMove(output, range->GetSpillOperand(), code_zone());
}
}
}
......@@ -1133,16 +1159,8 @@ void RegisterAllocator::MeetConstraintsBetween(Instruction* first,
// Make sure we add a gap move for spilling (if we have not done
// so already).
if (!assigned) {
range->SpillAtDefinition(local_zone(), gap_index, first_output);
range->SetSpillStartIndex(gap_index);
// This move to spill operand is not a real use. Liveness analysis
// and splitting of live ranges do not account for it.
// Thus it should be inserted to a lifetime position corresponding to
// the instruction end.
auto gap = code()->GapAt(gap_index);
auto move =
gap->GetOrCreateParallelMove(GapInstruction::BEFORE, code_zone());
move->AddMove(first_output, range->GetSpillOperand(), code_zone());
}
}
}
......@@ -1238,7 +1256,6 @@ void RegisterAllocator::ProcessInstructions(const InstructionBlock* block,
const ZoneList<MoveOperands>* move_operands = move->move_operands();
for (int i = 0; i < move_operands->length(); ++i) {
auto cur = &move_operands->at(i);
if (cur->IsIgnored()) continue;
auto from = cur->source();
auto to = cur->destination();
auto hint = to;
......@@ -1361,10 +1378,9 @@ void RegisterAllocator::ResolvePhis(const InstructionBlock* block) {
}
}
auto live_range = LiveRangeFor(phi_vreg);
auto block_start = code()->GetBlockStart(block->rpo_number());
block_start->GetOrCreateParallelMove(GapInstruction::BEFORE, code_zone())
->AddMove(output, live_range->GetSpillOperand(), code_zone());
live_range->SetSpillStartIndex(block->first_instruction_index());
int gap_index = block->first_instruction_index();
live_range->SpillAtDefinition(local_zone(), gap_index, output);
live_range->SetSpillStartIndex(gap_index);
// We use the phi-ness of some nodes in some later heuristics.
live_range->set_is_phi(true);
live_range->set_is_non_loop_phi(!block->IsLoopHeader());
......@@ -1723,8 +1739,7 @@ void RegisterAllocator::BuildLiveRanges() {
// live ranges, every use requires the constant to be in a register.
// Without this hack, all uses with "any" policy would get the constant
// operand assigned.
if (range->HasAllocatedSpillOperand() &&
range->GetSpillOperand()->IsConstant()) {
if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
for (auto pos = range->first_pos(); pos != nullptr; pos = pos->next_) {
pos->register_beneficial_ = true;
// TODO(dcarney): should the else case assert requires_reg_ == false?
......@@ -1828,7 +1843,7 @@ void RegisterAllocator::PopulatePointerMaps() {
// Check if the live range is spilled and the safe point is after
// the spill position.
if (range->HasAllocatedSpillOperand() &&
if (range->HasSpillOperand() &&
safe_point >= range->spill_start_index() &&
!range->GetSpillOperand()->IsConstant()) {
TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
......@@ -1908,7 +1923,7 @@ void RegisterAllocator::AllocateRegisters() {
TraceAlloc("Processing interval %d start=%d\n", current->id(),
position.Value());
if (current->HasAllocatedSpillOperand()) {
if (!current->HasNoSpillType()) {
TraceAlloc("Live range %d already has a spill operand\n", current->id());
auto next_pos = position;
if (code()->IsGapAt(next_pos.InstructionIndex())) {
......@@ -2073,7 +2088,7 @@ void RegisterAllocator::FreeSpillSlot(LiveRange* range) {
DCHECK(!FLAG_turbo_reuse_spill_slots);
// Check that we are the last range.
if (range->next() != nullptr) return;
if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
if (!range->TopLevel()->HasSpillOperand()) return;
auto spill_operand = range->TopLevel()->GetSpillOperand();
if (spill_operand->IsConstant()) return;
if (spill_operand->index() >= 0) {
......@@ -2488,7 +2503,7 @@ void RegisterAllocator::Spill(LiveRange* range) {
DCHECK(!range->IsSpilled());
TraceAlloc("Spilling live range %d\n", range->id());
auto first = range->TopLevel();
if (!first->HasAllocatedSpillOperand()) {
if (first->HasNoSpillType()) {
if (FLAG_turbo_reuse_spill_slots) {
AssignSpillRangeToLiveRange(first);
} else {
......@@ -2497,17 +2512,15 @@ void RegisterAllocator::Spill(LiveRange* range) {
// Allocate a new operand referring to the spill slot.
RegisterKind kind = range->Kind();
int index = frame()->AllocateSpillSlot(kind == DOUBLE_REGISTERS);
if (kind == DOUBLE_REGISTERS) {
op = DoubleStackSlotOperand::Create(index, local_zone());
} else {
DCHECK(kind == GENERAL_REGISTERS);
op = StackSlotOperand::Create(index, local_zone());
}
auto op_kind = kind == DOUBLE_REGISTERS
? InstructionOperand::DOUBLE_STACK_SLOT
: InstructionOperand::STACK_SLOT;
op = new (code_zone()) InstructionOperand(op_kind, index);
}
first->SetSpillOperand(op);
}
}
range->MakeSpilled(code_zone());
range->MakeSpilled();
}
......
......@@ -197,7 +197,7 @@ class LiveRange FINAL : public ZoneObject {
int assigned_register() const { return assigned_register_; }
int spill_start_index() const { return spill_start_index_; }
void set_assigned_register(int reg, Zone* zone);
void MakeSpilled(Zone* zone);
void MakeSpilled();
bool is_phi() const { return is_phi_; }
void set_is_phi(bool is_phi) { is_phi_ = is_phi; }
bool is_non_loop_phi() const { return is_non_loop_phi_; }
......@@ -260,13 +260,27 @@ class LiveRange FINAL : public ZoneObject {
return last_interval_->end();
}
bool HasAllocatedSpillOperand() const;
InstructionOperand* GetSpillOperand() const { return spill_operand_; }
void SetSpillOperand(InstructionOperand* operand);
enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
SpillType spill_type() const { return spill_type_; }
InstructionOperand* GetSpillOperand() const {
return spill_type_ == SpillType::kSpillOperand ? spill_operand_ : nullptr;
}
SpillRange* GetSpillRange() const {
return spill_type_ == SpillType::kSpillRange ? spill_range_ : nullptr;
}
bool HasNoSpillType() const { return spill_type_ == SpillType::kNoSpillType; }
bool HasSpillOperand() const {
return spill_type_ == SpillType::kSpillOperand;
}
bool HasSpillRange() const { return spill_type_ == SpillType::kSpillRange; }
void SetSpillRange(SpillRange* spill_range) { spill_range_ = spill_range; }
SpillRange* GetSpillRange() const { return spill_range_; }
void SpillAtDefinition(Zone* zone, int gap_index,
InstructionOperand* operand);
void SetSpillOperand(InstructionOperand* operand);
void SetSpillRange(SpillRange* spill_range);
void CommitSpillOperand(InstructionOperand* operand);
void CommitSpillsAtDefinition(InstructionSequence* sequence,
InstructionOperand* operand);
void SetSpillStartIndex(int start) {
spill_start_index_ = Min(start, spill_start_index_);
......@@ -293,12 +307,14 @@ class LiveRange FINAL : public ZoneObject {
#endif
private:
void ConvertOperands(Zone* zone);
struct SpillAtDefinitionList;
void ConvertUsesToOperand(InstructionOperand* op);
UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
void AdvanceLastProcessedMarker(UseInterval* to_start_of,
LifetimePosition but_not_past) const;
// TODO(dcarney): pack this structure better.
int id_;
bool spilled_;
bool is_phi_;
......@@ -315,9 +331,13 @@ class LiveRange FINAL : public ZoneObject {
UsePosition* last_processed_use_;
// This is used as a cache, it's invalid outside of BuildLiveRanges.
InstructionOperand* current_hint_operand_;
InstructionOperand* spill_operand_;
int spill_start_index_;
SpillRange* spill_range_;
SpillType spill_type_;
union {
InstructionOperand* spill_operand_;
SpillRange* spill_range_;
};
SpillAtDefinitionList* spills_at_definition_;
friend class RegisterAllocator; // Assigns to kind_.
......@@ -388,13 +408,16 @@ class RegisterAllocator FINAL : public ZoneObject {
// Phase 5: reassign spill slots for maximal reuse.
void ReuseSpillSlots();
// Phase 6: compute values for pointer maps.
// Phase 6: commit assignment.
void CommitAssignment();
// Phase 7: compute values for pointer maps.
void PopulatePointerMaps(); // TODO(titzer): rename to PopulateReferenceMaps.
// Phase 7: reconnect split ranges with moves.
// Phase 8: reconnect split ranges with moves.
void ConnectRanges();
// Phase 8: insert moves to connect ranges across basic blocks.
// Phase 9: insert moves to connect ranges across basic blocks.
void ResolveControlFlow();
private:
......