Commit 9b39394e authored by Tobias Tebbi, committed by Commit Bot

[compiler] [register allocator] use priority queue for unhandled live ranges

The motivation for this CL was the expensive UnhandledIsSorted() checks,
which increase mksnapshot time (v8:7895).
In addition, it is a cleanup and removes a potential source of quadratic
runtime.

Change-Id: I33e734e4e20183768dbf8222adcbb3abc1f1a6bf
Reviewed-on: https://chromium-review.googlesource.com/1116960
Commit-Queue: Tobias Tebbi <tebbi@chromium.org>
Reviewed-by: Stephan Herhut <herhut@chromium.org>
Cr-Commit-Position: refs/heads/master@{#54063}
parent 9a1eae43
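
The gist of the change, as a minimal standalone sketch (plain ints stand in for live-range start positions; nothing below is code from this CL): the sorted-vector-plus-pop_back scheme becomes a priority queue whose top() always holds the next range to process.

// Sketch only: a min-ordered priority queue hands out ranges in ascending
// start order via top()/pop(), replacing sort-once-then-pop_back.
#include <cstdio>
#include <functional>
#include <queue>
#include <vector>

int main() {
  std::priority_queue<int, std::vector<int>, std::greater<int>> unhandled;
  for (int start : {30, 10, 20}) unhandled.push(start);  // O(log n) each
  while (!unhandled.empty()) {
    std::printf("process range starting at %d\n", unhandled.top());
    unhandled.pop();  // prints 10, then 20, then 30
  }
  return 0;
}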
src/compiler/register-allocator.cc
@@ -2742,15 +2742,21 @@ const char* RegisterAllocator::RegisterName(int register_code) const {
   }
 }
 
+// static
+LinearScanAllocator::LiveRangeQueue LinearScanAllocator::MakeLiveRangeQueue(
+    size_t capacity, Zone* local_zone) {
+  ZoneVector<LiveRange*> backing_store(local_zone);
+  backing_store.reserve(capacity);
+  return LiveRangeQueue(LiveRangeOrdering(), std::move(backing_store));
+}
+
 LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
                                          RegisterKind kind, Zone* local_zone)
     : RegisterAllocator(data, kind),
-      unhandled_live_ranges_(local_zone),
+      unhandled_live_ranges_(MakeLiveRangeQueue(
+          static_cast<size_t>(code()->VirtualRegisterCount() * 2), local_zone)),
       active_live_ranges_(local_zone),
       inactive_live_ranges_(local_zone) {
-  unhandled_live_ranges().reserve(
-      static_cast<size_t>(code()->VirtualRegisterCount() * 2));
   active_live_ranges().reserve(8);
   inactive_live_ranges().reserve(8);
   // TryAllocateFreeReg and AllocateBlockedReg assume this
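
std::priority_queue has no reserve() of its own, which is why MakeLiveRangeQueue pre-reserves the backing container and moves it into the queue's (comparator, container) constructor. A sketch of the same pattern, with std::vector standing in for the zone-allocated ZoneVector and stub Range/Ordering types in place of the real ones:

// Sketch only: Range and Ordering are stand-ins for LiveRange and
// LiveRangeOrdering; std::vector replaces ZoneVector.
#include <cstddef>
#include <queue>
#include <utility>
#include <vector>

struct Range { int start; };
struct Ordering {
  bool operator()(const Range* a, const Range* b) const {
    return b->start < a->start;  // inverted max-heap comparator => min on top
  }
};
using Queue = std::priority_queue<Range*, std::vector<Range*>, Ordering>;

Queue MakeQueue(std::size_t capacity) {
  std::vector<Range*> backing_store;
  backing_store.reserve(capacity);  // avoid reallocations while pushing
  // priority_queue(const Compare&, Container&&) adopts the reserved store.
  return Queue(Ordering(), std::move(backing_store));
}

Usage would then be along the lines of Queue q = MakeQueue(1024); followed by q.push(...), mirroring how the constructor above sizes the queue to twice the virtual register count up front.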
@@ -2775,12 +2781,10 @@ void LinearScanAllocator::AllocateRegisters() {
     for (LiveRange* to_add = range; to_add != nullptr;
          to_add = to_add->next()) {
       if (!to_add->spilled()) {
-        AddToUnhandledUnsorted(to_add);
+        AddToUnhandled(to_add);
       }
     }
   }
-  SortUnhandled();
-  SLOW_DCHECK(UnhandledIsSorted());
 
   if (mode() == GENERAL_REGISTERS) {
     for (TopLevelLiveRange* current : data()->fixed_live_ranges()) {
@@ -2801,9 +2805,8 @@ void LinearScanAllocator::AllocateRegisters() {
   }
 
   while (!unhandled_live_ranges().empty()) {
-    SLOW_DCHECK(UnhandledIsSorted());
-    LiveRange* current = unhandled_live_ranges().back();
-    unhandled_live_ranges().pop_back();
+    LiveRange* current = unhandled_live_ranges().top();
+    unhandled_live_ranges().pop();
     LifetimePosition position = current->Start();
 #ifdef DEBUG
     allocation_finger_ = position;
@@ -2856,7 +2859,7 @@ bool LinearScanAllocator::TrySplitAndSpillSplinter(LiveRange* range) {
     return false;
   } else if (next_reg->pos().PrevStart() > range->Start()) {
     LiveRange* tail = SplitRangeAt(range, next_reg->pos().PrevStart());
-    AddToUnhandledSorted(tail);
+    AddToUnhandled(tail);
     Spill(range);
     return true;
   }
@@ -2887,72 +2890,14 @@ void LinearScanAllocator::AddToInactive(LiveRange* range) {
   inactive_live_ranges().push_back(range);
 }
 
-void LinearScanAllocator::AddToUnhandledSorted(LiveRange* range) {
+void LinearScanAllocator::AddToUnhandled(LiveRange* range) {
   if (range == nullptr || range->IsEmpty()) return;
   DCHECK(!range->HasRegisterAssigned() && !range->spilled());
   DCHECK(allocation_finger_ <= range->Start());
-  for (size_t i = unhandled_live_ranges().size(); i-- > 0;) {
-    LiveRange* cur_range = unhandled_live_ranges().at(i);
-    if (!range->ShouldBeAllocatedBefore(cur_range)) continue;
-    TRACE("Add live range %d:%d to unhandled at %zu\n",
-          range->TopLevel()->vreg(), range->relative_id(), i + 1);
-    auto it = unhandled_live_ranges().begin() + (i + 1);
-    unhandled_live_ranges().insert(it, range);
-    SLOW_DCHECK(UnhandledIsSorted());
-    return;
-  }
-  TRACE("Add live range %d:%d to unhandled at start\n",
-        range->TopLevel()->vreg(), range->relative_id());
-  unhandled_live_ranges().insert(unhandled_live_ranges().begin(), range);
-  SLOW_DCHECK(UnhandledIsSorted());
-}
-
-void LinearScanAllocator::AddToUnhandledUnsorted(LiveRange* range) {
-  if (range == nullptr || range->IsEmpty()) return;
-  DCHECK(!range->HasRegisterAssigned() && !range->spilled());
-  TRACE("Add live range %d:%d to unhandled unsorted at end\n",
-        range->TopLevel()->vreg(), range->relative_id());
-  unhandled_live_ranges().push_back(range);
-}
-
-static bool UnhandledSortHelper(LiveRange* a, LiveRange* b) {
-  DCHECK(!a->ShouldBeAllocatedBefore(b) || !b->ShouldBeAllocatedBefore(a));
-  if (a->ShouldBeAllocatedBefore(b)) return false;
-  if (b->ShouldBeAllocatedBefore(a)) return true;
-  return a->TopLevel()->vreg() < b->TopLevel()->vreg();
-}
-
-// Sort the unhandled live ranges so that the ranges to be processed first are
-// at the end of the array list. This is convenient for the register allocation
-// algorithm because it is efficient to remove elements from the end.
-void LinearScanAllocator::SortUnhandled() {
-  TRACE("Sort unhandled\n");
-  std::sort(unhandled_live_ranges().begin(), unhandled_live_ranges().end(),
-            &UnhandledSortHelper);
-}
-
-bool LinearScanAllocator::UnhandledIsSorted() {
-  // Note: This function is excessively expensive. It doubles time spent in
-  // mksnapshot in debug builds. All related DCHECKs have now been turned into
-  // SLOW_DCHECKs, but we could also investigate whether it is necessary or
-  // could be sped up further. See also: https://crbug.com/v8/7891#c15.
-  const size_t len = unhandled_live_ranges().size();
-  if (len < 2) return true;
-  int lhs = unhandled_live_ranges_[0]->Start().value();
-  for (size_t i = 1; i < len; i++) {
-    int rhs = unhandled_live_ranges_[i]->Start().value();
-    if (lhs < rhs) return false;
-    lhs = rhs;
-  }
-  return true;
+  TRACE("Add live range %d:%d to unhandled\n", range->TopLevel()->vreg(),
+        range->relative_id());
+  unhandled_live_ranges().push(range);
 }
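
The deleted insertion loop above was O(n) per AddToUnhandledSorted call, so a pathological insertion order could go quadratic overall; a heap push is O(log n). A test-style sketch (stand-in types again, not CL code) checking that the two schemes hand out ranges in the same order:

// Sketch only: Range/Ordering stand in for LiveRange/LiveRangeOrdering.
#include <algorithm>
#include <cassert>
#include <queue>
#include <vector>

struct Range { int start; };
struct Ordering {
  bool operator()(const Range* a, const Range* b) const {
    return b->start < a->start;
  }
};

int main() {
  std::vector<Range> ranges = {{40}, {10}, {30}, {20}};

  // Old scheme: keep a vector sorted descending by start, pop from the back.
  std::vector<Range*> sorted;
  for (Range& r : ranges) sorted.push_back(&r);
  std::sort(sorted.begin(), sorted.end(),
            [](Range* a, Range* b) { return a->start > b->start; });

  // New scheme: heap ordered so the smallest start surfaces at top().
  std::priority_queue<Range*, std::vector<Range*>, Ordering> queue;
  for (Range& r : ranges) queue.push(&r);

  while (!queue.empty()) {
    assert(queue.top()->start == sorted.back()->start);  // same pop order
    queue.pop();
    sorted.pop_back();
  }
  return 0;
}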
@@ -3173,7 +3118,7 @@ bool LinearScanAllocator::TryAllocateFreeReg(
   // Register reg is available at the range start but becomes blocked before
   // the range end. Split current at position where it becomes blocked.
   LiveRange* tail = SplitRangeAt(current, pos);
-  AddToUnhandledSorted(tail);
+  AddToUnhandled(tail);
 
   // Try to allocate preferred register once more.
   if (TryAllocatePreferredReg(current, free_until_pos)) return true;
@@ -3319,7 +3264,7 @@ void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
     // position.
     LiveRange* tail =
        SplitBetween(current, current->Start(), block_pos[reg].Start());
-    AddToUnhandledSorted(tail);
+    AddToUnhandled(tail);
   }
 
   // Register reg is not blocked for the whole range.
@@ -3482,7 +3427,6 @@ bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) {
     bool merged = first_op_spill->TryMerge(spill_range);
     if (!merged) return false;
     SpillBetween(range, range->Start(), pos->pos());
-    SLOW_DCHECK(UnhandledIsSorted());
     return true;
   }
   return false;
@@ -3522,11 +3466,11 @@ void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
     DCHECK(third_part != second_part);
 
     Spill(second_part);
-    AddToUnhandledSorted(third_part);
+    AddToUnhandled(third_part);
   } else {
     // The split result does not intersect with [start, end[.
     // Nothing to spill. Just put it to unhandled as whole.
-    AddToUnhandledSorted(second_part);
+    AddToUnhandled(second_part);
   }
 }

src/compiler/register-allocator.h
@@ -1040,9 +1040,15 @@ class LinearScanAllocator final : public RegisterAllocator {
   void AllocateRegisters();
 
  private:
-  ZoneVector<LiveRange*>& unhandled_live_ranges() {
-    return unhandled_live_ranges_;
-  }
+  struct LiveRangeOrdering {
+    bool operator()(LiveRange* a, LiveRange* b) {
+      return b->ShouldBeAllocatedBefore(a);
+    }
+  };
+  using LiveRangeQueue = std::priority_queue<LiveRange*, ZoneVector<LiveRange*>,
+                                             LiveRangeOrdering>;
+  static LiveRangeQueue MakeLiveRangeQueue(size_t capacity, Zone* local_zone);
+  LiveRangeQueue& unhandled_live_ranges() { return unhandled_live_ranges_; }
   ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
   ZoneVector<LiveRange*>& inactive_live_ranges() {
     return inactive_live_ranges_;
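
The inversion in LiveRangeOrdering is deliberate: std::priority_queue is a max-heap, so returning b->ShouldBeAllocatedBefore(a) makes the range that should be allocated before all others the comparator's maximum, i.e. the one surfaced by top(). A standalone sketch with stub types (should_be_allocated_before stands in for the real predicate):

// Sketch only: RangeStub/OrderingStub are stand-ins for the real types.
#include <cassert>
#include <queue>
#include <vector>

struct RangeStub {
  int start;
  bool should_be_allocated_before(const RangeStub* other) const {
    return start < other->start;  // earlier start => allocate first
  }
};

struct OrderingStub {
  bool operator()(const RangeStub* a, const RangeStub* b) const {
    return b->should_be_allocated_before(a);  // inverted on purpose
  }
};

int main() {
  std::priority_queue<RangeStub*, std::vector<RangeStub*>, OrderingStub> q;
  RangeStub early{5}, late{50};
  q.push(&late);
  q.push(&early);
  assert(q.top() == &early);  // highest allocation priority at the top
  return 0;
}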
@@ -1053,10 +1059,7 @@ class LinearScanAllocator final : public RegisterAllocator {
   // Helper methods for updating the life range lists.
   void AddToActive(LiveRange* range);
   void AddToInactive(LiveRange* range);
-  void AddToUnhandledSorted(LiveRange* range);
-  void AddToUnhandledUnsorted(LiveRange* range);
-  void SortUnhandled();
-  bool UnhandledIsSorted();
+  void AddToUnhandled(LiveRange* range);
   void ActiveToHandled(LiveRange* range);
   void ActiveToInactive(LiveRange* range);
   void InactiveToHandled(LiveRange* range);
@@ -1090,7 +1093,7 @@ class LinearScanAllocator final : public RegisterAllocator {
   void SplitAndSpillIntersecting(LiveRange* range);
 
-  ZoneVector<LiveRange*> unhandled_live_ranges_;
+  LiveRangeQueue unhandled_live_ranges_;
   ZoneVector<LiveRange*> active_live_ranges_;
   ZoneVector<LiveRange*> inactive_live_ranges_;