Commit 1cd60451 authored by mtrofin, committed by Commit bot

[turbofan] Greedy allocator refactoring.

Separated core greedy allocator concepts, exposing the APIs we would want to continue working with. In particular, this change completely reworks CoalescedLiveRanges to reflect the fact that we expect more than one possible conflict, scrapping the initial design of the structure. Since this is a critical part of the design, this change may be thought of as a full rewrite of the algorithm.

Reduced the heuristics to just two essential ones: split "somewhere", which we will still need when all other heuristics fail; and spill.

Introduced a simple primitive for splitting: split at GapPosition::START. The goal is to use such primitives to author heuristics quickly and reliably.
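
A minimal sketch of the intent (hypothetical helper, not the code in this change), assuming LifetimePosition's GapFromInstructionIndex/ToInstructionIndex API:

  // Clamp an arbitrary position down to the GapPosition::START slot of the
  // instruction containing it, so splits always land on a gap START.
  LifetimePosition SplittableStart(LifetimePosition pos) {
    return LifetimePosition::GapFromInstructionIndex(pos.ToInstructionIndex());
  }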

I expected this primitive to "just work" for any arbitrary instruction index within a live range, e.g. its middle. That turns out not to be the case: it upsets execution in certain scenarios. Restricting splits to positions just before or after use positions seems to work. I am still investigating the source of the failures in the "arbitrary instruction in the range" case.

I intended to document the rationale and prove the soundness of always using START for splits, but I will postpone that until this last remaining issue is resolved.

Review URL: https://codereview.chromium.org/1205173002

Cr-Commit-Position: refs/heads/master@{#29352}
parent e291b78a
@@ -622,6 +622,8 @@ source_set("v8_base") {
"src/compiler/basic-block-instrumentor.h",
"src/compiler/change-lowering.cc",
"src/compiler/change-lowering.h",
"src/compiler/coalesced-live-ranges.cc",
"src/compiler/coalesced-live-ranges.h",
"src/compiler/code-generator-impl.h",
"src/compiler/code-generator.cc",
"src/compiler/code-generator.h",
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/compiler/coalesced-live-ranges.h"
#include "src/compiler/greedy-allocator.h"
#include "src/compiler/register-allocator.h"
namespace v8 {
namespace internal {
namespace compiler {
#define TRACE(...) \
do { \
if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
} while (false)
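// Multiplier applied to a range's weight when it is allocated, and divided
// back out on eviction (see UpdateWeightAtAllocation/UpdateWeightAtEviction).
// It biases conflict resolution towards keeping already-allocated ranges in
// their registers.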
const float CoalescedLiveRanges::kAllocatedRangeMultiplier = 10.0;
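// Records `range` as allocated: boosts its weight and inserts one
// AllocatedInterval per UseInterval, so that later queries can find
// conflicts against it.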
void CoalescedLiveRanges::AllocateRange(LiveRange* range) {
UpdateWeightAtAllocation(range);
for (auto interval = range->first_interval(); interval != nullptr;
interval = interval->next()) {
storage().insert({interval->start(), interval->end(), range});
}
}
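// Erases all of the range's intervals from storage and clears its register
// assignment. The erase key carries a null range pointer: AllocatedInterval
// ordering compares only `start`, so the range field plays no part in the
// lookup.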
void CoalescedLiveRanges::Remove(LiveRange* range) {
for (auto interval = range->first_interval(); interval != nullptr;
interval = interval->next()) {
storage().erase({interval->start(), interval->end(), nullptr});
}
range->UnsetAssignedRegister();
}
float CoalescedLiveRanges::GetMaximumConflictingWeight(
const LiveRange* range) const {
float ret = LiveRange::kInvalidWeight;
auto end = storage().end();
for (auto query = range->first_interval(); query != nullptr;
query = query->next()) {
auto conflict = GetFirstConflict(query);
if (conflict == end) continue;
for (; QueryIntersectsAllocatedInterval(query, conflict); ++conflict) {
// It is possible we'll visit the same range multiple times, because
// successive (not necessarily consecutive) intervals belong to the same
// range, or because different intervals of the query range have the same
// range as conflict.
DCHECK_NE(conflict->range->weight(), LiveRange::kInvalidWeight);
ret = Max(ret, conflict->range->weight());
if (ret == LiveRange::kMaxWeight) break;
}
}
return ret;
}
void CoalescedLiveRanges::EvictAndRescheduleConflicts(
LiveRange* range, AllocationScheduler* scheduler) {
auto end = storage().end();
for (auto query = range->first_interval(); query != nullptr;
query = query->next()) {
auto conflict = GetFirstConflict(query);
if (conflict == end) continue;
while (QueryIntersectsAllocatedInterval(query, conflict)) {
LiveRange* range_to_evict = conflict->range;
// Bypass successive intervals belonging to the same range, because we're
// about to remove this range, and we don't want the storage iterator to
// become invalid.
while (conflict != end && conflict->range == range_to_evict) {
++conflict;
}
DCHECK(range_to_evict->HasRegisterAssigned());
CHECK(!range_to_evict->IsFixed());
Remove(range_to_evict);
UpdateWeightAtEviction(range_to_evict);
TRACE("Evicted range %d.\n", range_to_evict->id());
scheduler->Schedule(range_to_evict);
}
}
}
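// Checks the storage invariant: traversed in start order, allocated
// intervals never overlap - each interval begins at or after the previous
// one ends.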
bool CoalescedLiveRanges::VerifyAllocationsAreValid() const {
LifetimePosition last_end = LifetimePosition::GapFromInstructionIndex(0);
for (auto i : storage_) {
if (i.start < last_end) {
return false;
}
last_end = i.end;
}
return true;
}
void CoalescedLiveRanges::UpdateWeightAtAllocation(LiveRange* range) {
DCHECK_NE(range->weight(), LiveRange::kInvalidWeight);
range->set_weight(range->weight() * kAllocatedRangeMultiplier);
}
void CoalescedLiveRanges::UpdateWeightAtEviction(LiveRange* range) {
DCHECK_NE(range->weight(), LiveRange::kInvalidWeight);
range->set_weight(range->weight() / kAllocatedRangeMultiplier);
}
CoalescedLiveRanges::interval_iterator CoalescedLiveRanges::GetFirstConflict(
const UseInterval* query) const {
DCHECK(query != nullptr);
auto end = storage().end();
LifetimePosition q_start = query->start();
LifetimePosition q_end = query->end();
if (storage().empty() || storage().rbegin()->end <= q_start ||
storage().begin()->start >= q_end) {
return end;
}
auto ret = storage().upper_bound(AsAllocatedInterval(q_start));
// ret is either at the end (no allocated interval starts strictly after
// q_start) or at the first allocated interval whose start is strictly
// greater than q_start. In either case, the allocated interval just before
// it may still intersect the query: either it starts before the query's
// start but ends after it, or it starts exactly at the query's start. So
// unless we are right at the beginning of storage - meaning the first
// allocated interval starts after the query's start - look one step behind.
if (ret != storage().begin()) {
--ret;
if (!QueryIntersectsAllocatedInterval(query, ret)) {
// The previous interval does not intersect the query; step forward again.
++ret;
}
}
if (ret != end && QueryIntersectsAllocatedInterval(query, ret)) return ret;
return end;
}
} // namespace compiler
} // namespace internal
} // namespace v8
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_COALESCED_LIVE_RANGES_H_
#define V8_COALESCED_LIVE_RANGES_H_
#include "src/compiler/register-allocator.h"
#include "src/zone-containers.h"
namespace v8 {
namespace internal {
namespace compiler {
class AllocationScheduler;
// Collection of live ranges allocated to the same register.
// It supports efficiently finding all conflicts for a given, non-allocated
// range. See AllocatedInterval.
// Allocated live ranges do not intersect. At most, individual use intervals
// touch. We store, for a live range, an AllocatedInterval corresponding to each
// of that range's UseIntervals. We keep the list of AllocatedIntervals sorted
// by start. Then, given the non-intersecting property, consecutive
// AllocatedIntervals satisfy the invariant that the earlier interval's end is
// less than or equal to the later interval's start.
// This allows for quick (logarithmic complexity) identification of the first
// AllocatedInterval to conflict with a given LiveRange, and then for efficient
// traversal of conflicts.
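// Illustrative example (not from the code): with allocated intervals
// [2,5) and [7,9), a query interval [4,8) conflicts with both. upper_bound
// on the query start lands on [7,9); a one-step look-behind finds the first
// conflict [2,5), and iterating forward then yields [7,9).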
class CoalescedLiveRanges : public ZoneObject {
public:
explicit CoalescedLiveRanges(Zone* zone) : storage_(zone) {}
void clear() { storage_.clear(); }
bool empty() const { return storage_.empty(); }
// Returns the largest weight of a range conflicting with the given range,
// or kInvalidWeight if there are no conflicts.
float GetMaximumConflictingWeight(const LiveRange* range) const;
// Evicts all conflicts of the given range, and reschedules them with the
// provided scheduler.
void EvictAndRescheduleConflicts(LiveRange* range,
AllocationScheduler* scheduler);
// Allocates a range with a pre-calculated candidate weight.
void AllocateRange(LiveRange* range);
// TODO(mtrofin): remove this in favor of comprehensive unit tests.
bool VerifyAllocationsAreValid() const;
private:
static const float kAllocatedRangeMultiplier;
// Storage detail for CoalescedLiveRanges.
struct AllocatedInterval {
LifetimePosition start;
LifetimePosition end;
LiveRange* range;
bool operator<(const AllocatedInterval& other) const {
return start < other.start;
}
bool operator>(const AllocatedInterval& other) const {
return start > other.start;
}
};
typedef ZoneSet<AllocatedInterval> IntervalStore;
typedef IntervalStore::const_iterator interval_iterator;
IntervalStore& storage() { return storage_; }
const IntervalStore& storage() const { return storage_; }
// Augment the weight of a range that is about to be allocated.
static void UpdateWeightAtAllocation(LiveRange* range);
// Reduce the weight of a range that has lost allocation.
static void UpdateWeightAtEviction(LiveRange* range);
// Intersection utilities.
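// Intervals are half-open: [1,4) and [3,6) intersect, whereas [1,3) and
// [3,6) merely touch and do not conflict.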
static bool Intersects(LifetimePosition a_start, LifetimePosition a_end,
LifetimePosition b_start, LifetimePosition b_end) {
return a_start < b_end && b_start < a_end;
}
static AllocatedInterval AsAllocatedInterval(LifetimePosition pos) {
return {pos, LifetimePosition::Invalid(), nullptr};
}
bool QueryIntersectsAllocatedInterval(const UseInterval* query,
interval_iterator& pos) const {
DCHECK(query != nullptr);
return pos != storage().end() &&
Intersects(query->start(), query->end(), pos->start, pos->end);
}
void Remove(LiveRange* range);
// Get the first interval intersecting query. Since the intervals are sorted,
// subsequent intervals intersecting query follow.
interval_iterator GetFirstConflict(const UseInterval* query) const;
IntervalStore storage_;
DISALLOW_COPY_AND_ASSIGN(CoalescedLiveRanges);
};
} // namespace compiler
} // namespace internal
} // namespace v8
#endif // V8_COALESCED_LIVE_RANGES_H_
This diff is collapsed.
@@ -5,6 +5,7 @@
#ifndef V8_GREEDY_ALLOCATOR_H_
#define V8_GREEDY_ALLOCATOR_H_
#include "src/compiler/coalesced-live-ranges.h"
#include "src/compiler/register-allocator.h"
#include "src/zone-containers.h"
@@ -12,7 +13,43 @@ namespace v8 {
namespace internal {
namespace compiler {
class CoalescedLiveRanges;
// The unit of allocation scheduling. At minimum this is a LiveRange, but
// we may extend it to groups of LiveRanges. It has to be comparable.
class AllocationCandidate {
public:
explicit AllocationCandidate(LiveRange* range) : range_(range) {}
// Strict ordering operators
bool operator<(const AllocationCandidate& other) const {
return range_->GetSize() < other.range_->GetSize();
}
bool operator>(const AllocationCandidate& other) const {
return range_->GetSize() > other.range_->GetSize();
}
LiveRange* live_range() const { return range_; }
private:
LiveRange* range_;
};
// Schedule processing (allocating) of AllocationCandidates.
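// Usage sketch (assumed semantics): Schedule() enqueues a candidate;
// GetNext() pops the candidate that compares largest, i.e. the one with the
// biggest live range size, since ZonePriorityQueue is a max-heap over
// operator<.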
class AllocationScheduler final : ZoneObject {
public:
explicit AllocationScheduler(Zone* zone) : queue_(zone) {}
void Schedule(LiveRange* range);
AllocationCandidate GetNext();
bool empty() const { return queue_.empty(); }
private:
typedef ZonePriorityQueue<AllocationCandidate> ScheduleQueue;
ScheduleQueue queue_;
DISALLOW_COPY_AND_ASSIGN(AllocationScheduler);
};
// A variant of the LLVM Greedy Register Allocator. See
@@ -25,39 +62,47 @@ class GreedyAllocator final : public RegisterAllocator {
void AllocateRegisters();
private:
LifetimePosition GetSplittablePos(LifetimePosition pos);
const RegisterConfiguration* config() const { return data()->config(); }
AllocationScheduler& scheduler() { return scheduler_; }
CoalescedLiveRanges* current_allocations(unsigned i) {
return allocations_[i];
}
Zone* local_zone() const { return local_zone_; }
int GetHintedRegister(LiveRange* range);
// Insert fixed ranges.
void PreallocateFixedRanges();
typedef ZonePriorityQueue<std::pair<unsigned, LiveRange*>> PQueue;
// Schedule unassigned live ranges for allocation.
// TODO(mtrofin): groups.
void ScheduleAllocationCandidates();
unsigned GetLiveRangeSize(LiveRange* range);
void Enqueue(LiveRange* range);
// Find the optimal split for ranges defined by a memory operand, e.g.
// constants or function parameters passed on the stack.
void SplitAndSpillRangesDefinedByMemoryOperand();
void Evict(LiveRange* range);
float CalculateSpillWeight(LiveRange* range);
float CalculateMaxSpillWeight(const ZoneSet<LiveRange*>& ranges);
void TryAllocateCandidate(const AllocationCandidate& candidate);
void TryAllocateLiveRange(LiveRange* range);
bool CanProcessRange(LiveRange* range) const {
return range != nullptr && !range->IsEmpty() && range->kind() == mode();
}
bool TryAllocate(LiveRange* current, ZoneSet<LiveRange*>* conflicting);
bool TryAllocatePhysicalRegister(unsigned reg_id, LiveRange* range,
ZoneSet<LiveRange*>* conflicting);
bool HandleSpillOperands(LiveRange* range);
void AllocateBlockedRange(LiveRange* current, LifetimePosition pos,
bool spill);
// Calculate the weight of a candidate for allocation.
void EnsureValidRangeWeight(LiveRange* range);
LiveRange* SpillBetweenUntil(LiveRange* range, LifetimePosition start,
LifetimePosition until, LifetimePosition end);
void AssignRangeToRegister(int reg_id, LiveRange* range);
// Calculate the new weight of a range that is about to be allocated.
float GetAllocatedRangeWeight(float candidate_weight);
// This is the extension point for splitting heuristics.
void SplitOrSpillBlockedRange(LiveRange* range);
LifetimePosition FindProgressingSplitPosition(LiveRange* range,
bool* is_spill_pos);
// Necessary heuristic: spill when all else has failed.
void SpillRangeAsLastResort(LiveRange* range);
void AssignRangeToRegister(int reg_id, LiveRange* range);
Zone* local_zone_;
ZoneVector<CoalescedLiveRanges*> allocations_;
PQueue queue_;
AllocationScheduler scheduler_;
DISALLOW_COPY_AND_ASSIGN(GreedyAllocator);
};
} // namespace compiler
@@ -237,6 +237,10 @@ struct LiveRange::SpillAtDefinitionList : ZoneObject {
};
const float LiveRange::kInvalidWeight = -1;
const float LiveRange::kMaxWeight = std::numeric_limits<float>::max();
LiveRange::LiveRange(int id, MachineType machine_type)
: id_(id),
spill_start_index_(kMaxInt),
@@ -250,7 +254,9 @@ LiveRange::LiveRange(int id, MachineType machine_type)
spills_at_definition_(nullptr),
current_interval_(nullptr),
last_processed_use_(nullptr),
current_hint_position_(nullptr) {
current_hint_position_(nullptr),
size_(kInvalidSize),
weight_(kInvalidWeight) {
DCHECK(AllocatedOperand::IsSupportedMachineType(machine_type));
bits_ = SpillTypeField::encode(SpillType::kNoSpillType) |
AssignedRegisterField::encode(kUnassignedRegister) |
@@ -559,6 +565,10 @@ void LiveRange::SplitAt(LifetimePosition position, LiveRange* result,
result->next_ = next_;
next_ = result;
// Invalidate size and weight of this range. The child range has them
// invalid at construction.
size_ = kInvalidSize;
weight_ = kInvalidWeight;
#ifdef DEBUG
Verify();
result->Verify();
@@ -749,6 +759,19 @@ LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const {
}
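// Lazily computes and caches the number of LifetimePositions covered by
// this range, e.g. intervals [4,10) and [12,14) give a size of 8. The cache
// is invalidated when the range is split.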
unsigned LiveRange::GetSize() {
if (size_ == kInvalidSize) {
size_ = 0;
for (auto interval = first_interval(); interval != nullptr;
interval = interval->next()) {
size_ += (interval->end().value() - interval->start().value());
}
}
return static_cast<unsigned>(size_);
}
static bool AreUseIntervalsIntersecting(UseInterval* interval1,
UseInterval* interval2) {
while (interval1 != nullptr && interval2 != nullptr) {
@@ -1852,6 +1875,15 @@ const ZoneVector<LiveRange*>& RegisterAllocator::GetFixedRegisters() const {
}
const char* RegisterAllocator::RegisterName(int allocation_index) const {
if (mode() == GENERAL_REGISTERS) {
return data()->config()->general_register_name(allocation_index);
} else {
return data()->config()->double_register_name(allocation_index);
}
}
LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
RegisterKind kind, Zone* local_zone)
: RegisterAllocator(data, kind),
@@ -1958,15 +1990,6 @@ void LinearScanAllocator::AllocateRegisters() {
}
const char* LinearScanAllocator::RegisterName(int allocation_index) const {
if (mode() == GENERAL_REGISTERS) {
return data()->config()->general_register_name(allocation_index);
} else {
return data()->config()->double_register_name(allocation_index);
}
}
void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
int reg) {
data()->MarkAllocated(range->kind(), reg);
@@ -428,6 +428,15 @@ class LiveRange final : public ZoneObject {
return spills_at_definition_;
}
// Used solely by the Greedy Allocator:
unsigned GetSize();
float weight() const { return weight_; }
void set_weight(float weight) { weight_ = weight; }
static const int kInvalidSize = -1;
static const float kInvalidWeight;
static const float kMaxWeight;
private:
void set_spill_type(SpillType value) {
bits_ = SpillTypeField::update(bits_, value);
@@ -468,6 +477,14 @@
// This is used as a cache, it's invalid outside of BuildLiveRanges.
mutable UsePosition* current_hint_position_;
// greedy: the number of LifetimePositions covered by this range. Used to
// prioritize selecting live ranges for register assignment, as well as
// in weight calculations.
int size_;
// greedy: a metric for resolving conflicts between ranges with an assigned
// register and ranges that intersect them and need a register.
float weight_;
DISALLOW_COPY_AND_ASSIGN(LiveRange);
};
@@ -770,6 +787,7 @@ class RegisterAllocator : public ZoneObject {
LifetimePosition pos);
const ZoneVector<LiveRange*>& GetFixedRegisters() const;
const char* RegisterName(int allocation_index) const;
private:
RegisterAllocationData* const data_;
@@ -789,8 +807,6 @@ class LinearScanAllocator final : public RegisterAllocator {
void AllocateRegisters();
private:
const char* RegisterName(int allocation_index) const;
ZoneVector<LiveRange*>& unhandled_live_ranges() {
return unhandled_live_ranges_;
}
@@ -455,6 +455,8 @@
'../../src/compiler/basic-block-instrumentor.h',
'../../src/compiler/change-lowering.cc',
'../../src/compiler/change-lowering.h',
'../../src/compiler/coalesced-live-ranges.cc',
'../../src/compiler/coalesced-live-ranges.h',
'../../src/compiler/code-generator-impl.h',
'../../src/compiler/code-generator.cc',
'../../src/compiler/code-generator.h',