Commit 13781060 authored by Sigurd Schneider, committed by Commit Bot

[regalloc] Rename --trace-alloc to --trace-turbo-alloc

And make --trace-turbo-alloc honor --trace-turbo-filter

This is useful to filter out a specific compile job, e.g.
if mksnapshot is crashing it easily produces 5GB of logs
without filter.

TBR=bmeurer@chromium.org

Change-Id: Ic7dea0a4cef793b517d98ca2ba1f6ea6eeac63ea
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1521111
Commit-Queue: Sigurd Schneider <sigurds@chromium.org>
Reviewed-by: Georg Neis <neis@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/master@{#62536}
parent e9d93bab
......@@ -242,6 +242,7 @@ void OptimizedCompilationInfo::SetTracingFlags(bool passes_filter) {
if (FLAG_trace_turbo) SetFlag(kTraceTurboJson);
if (FLAG_trace_turbo_graph) SetFlag(kTraceTurboGraph);
if (FLAG_trace_turbo_scheduled) SetFlag(kTraceTurboScheduled);
if (FLAG_trace_turbo_alloc) SetFlag(kTraceTurboAllocation);
if (FLAG_trace_heap_broker) SetFlag(kTraceHeapBroker);
}
......
......@@ -60,10 +60,11 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
kTraceTurboJson = 1 << 14,
kTraceTurboGraph = 1 << 15,
kTraceTurboScheduled = 1 << 16,
kTraceHeapBroker = 1 << 17,
kWasmRuntimeExceptionSupport = 1 << 18,
kTurboControlFlowAwareAllocation = 1 << 19,
kTurboPreprocessRanges = 1 << 20
kTraceTurboAllocation = 1 << 17,
kTraceHeapBroker = 1 << 18,
kWasmRuntimeExceptionSupport = 1 << 19,
kTurboControlFlowAwareAllocation = 1 << 20,
kTurboPreprocessRanges = 1 << 21
};
// Construct a compilation info for optimized compilation.
......@@ -190,6 +191,10 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
// True when --trace-turbo-graph tracing was enabled for this compilation.
bool trace_turbo_graph_enabled() const {
  return GetFlag(kTraceTurboGraph);
}
// True when --trace-turbo-alloc tracing was enabled for this compilation.
bool trace_turbo_allocation_enabled() const { return GetFlag(kTraceTurboAllocation); }
// True when --trace-turbo-scheduled tracing was enabled for this compilation.
bool trace_turbo_scheduled_enabled() const { return GetFlag(kTraceTurboScheduled); }
......
......@@ -9,15 +9,16 @@ namespace v8 {
namespace internal {
namespace compiler {
// Conditional tracing helper: prints via PrintF only when |cond| is true,
// letting call sites gate output on a per-compilation trace flag instead of
// reading FLAG_trace_alloc globally.
// NOTE(review): the unterminated pre-change TRACE macro fragment that
// preceded this definition was stripped-diff residue (its trailing '\'
// swallowed the TRACE_COND definition) and has been dropped.
#define TRACE_COND(cond, ...)        \
  do {                               \
    if (cond) PrintF(__VA_ARGS__);   \
  } while (false)
namespace {
void CreateSplinter(TopLevelLiveRange* range, RegisterAllocationData* data,
LifetimePosition first_cut, LifetimePosition last_cut) {
LifetimePosition first_cut, LifetimePosition last_cut,
bool trace_alloc) {
DCHECK(!range->IsSplinter());
// We can ignore ranges that live solely in deferred blocks.
// If a range ends right at the end of a deferred block, it is marked by
......@@ -49,9 +50,10 @@ void CreateSplinter(TopLevelLiveRange* range, RegisterAllocationData* data,
range->SetSplinter(splinter);
}
Zone* zone = data->allocation_zone();
TRACE("creating splinter %d for range %d between %d and %d\n",
range->splinter()->vreg(), range->vreg(), start.ToInstructionIndex(),
end.ToInstructionIndex());
TRACE_COND(trace_alloc,
"creating splinter %d for range %d between %d and %d\n",
range->splinter()->vreg(), range->vreg(),
start.ToInstructionIndex(), end.ToInstructionIndex());
range->Splinter(start, end, zone);
}
}
......@@ -102,7 +104,8 @@ void SplinterLiveRange(TopLevelLiveRange* range, RegisterAllocationData* data) {
current_block->last_instruction_index());
} else {
if (first_cut.IsValid()) {
CreateSplinter(range, data, first_cut, last_cut);
CreateSplinter(range, data, first_cut, last_cut,
data->is_trace_alloc());
first_cut = LifetimePosition::Invalid();
last_cut = LifetimePosition::Invalid();
}
......@@ -116,7 +119,8 @@ void SplinterLiveRange(TopLevelLiveRange* range, RegisterAllocationData* data) {
// have to connect blocks anyway, so we can also splinter to the end of the
// block, too.
if (first_cut.IsValid()) {
CreateSplinter(range, data, first_cut, interval_end);
CreateSplinter(range, data, first_cut, interval_end,
data->is_trace_alloc());
first_cut = LifetimePosition::Invalid();
last_cut = LifetimePosition::Invalid();
}
......@@ -186,7 +190,7 @@ void LiveRangeMerger::Merge() {
}
}
#undef TRACE
#undef TRACE_COND
} // namespace compiler
} // namespace internal
......
......@@ -17,11 +17,13 @@ namespace v8 {
namespace internal {
namespace compiler {
// Conditional tracing helper: prints via PrintF only when |cond| is true.
// NOTE(review): the unterminated pre-change TRACE macro fragment that
// preceded this definition was stripped-diff residue (its trailing '\'
// swallowed the TRACE_COND definition) and has been dropped.
#define TRACE_COND(cond, ...)        \
  do {                               \
    if (cond) PrintF(__VA_ARGS__);   \
  } while (false)

// Convenience form for contexts that have a data() accessor: traces only
// when this compilation's is_trace_alloc() flag is set.
#define TRACE(...) TRACE_COND(data()->is_trace_alloc(), __VA_ARGS__)
namespace {
static constexpr int kFloat32Bit =
......@@ -1119,8 +1121,9 @@ void TopLevelLiveRange::Verify() const {
}
}
void TopLevelLiveRange::ShortenTo(LifetimePosition start) {
TRACE("Shorten live range %d to [%d\n", vreg(), start.value());
void TopLevelLiveRange::ShortenTo(LifetimePosition start, bool trace_alloc) {
TRACE_COND(trace_alloc, "Shorten live range %d to [%d\n", vreg(),
start.value());
DCHECK_NOT_NULL(first_interval_);
DCHECK(first_interval_->start() <= start);
DCHECK(start < first_interval_->end());
......@@ -1128,9 +1131,10 @@ void TopLevelLiveRange::ShortenTo(LifetimePosition start) {
}
void TopLevelLiveRange::EnsureInterval(LifetimePosition start,
LifetimePosition end, Zone* zone) {
TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(),
end.value());
LifetimePosition end, Zone* zone,
bool trace_alloc) {
TRACE_COND(trace_alloc, "Ensure live range %d in interval [%d %d[\n", vreg(),
start.value(), end.value());
LifetimePosition new_end = end;
while (first_interval_ != nullptr && first_interval_->start() <= end) {
if (first_interval_->end() > end) {
......@@ -1148,9 +1152,10 @@ void TopLevelLiveRange::EnsureInterval(LifetimePosition start,
}
void TopLevelLiveRange::AddUseInterval(LifetimePosition start,
LifetimePosition end, Zone* zone) {
TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(),
end.value());
LifetimePosition end, Zone* zone,
bool trace_alloc) {
TRACE_COND(trace_alloc, "Add to live range %d interval [%d %d[\n", vreg(),
start.value(), end.value());
if (first_interval_ == nullptr) {
UseInterval* interval = new (zone) UseInterval(start, end);
first_interval_ = interval;
......@@ -1173,9 +1178,10 @@ void TopLevelLiveRange::AddUseInterval(LifetimePosition start,
}
}
void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) {
void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos, bool trace_alloc) {
LifetimePosition pos = use_pos->pos();
TRACE("Add to live range %d use position %d\n", vreg(), pos.value());
TRACE_COND(trace_alloc, "Add to live range %d use position %d\n", vreg(),
pos.value());
UsePosition* prev_hint = nullptr;
UsePosition* prev = nullptr;
UsePosition* current = first_pos_;
......@@ -1309,13 +1315,8 @@ void LinearScanAllocator::PrintRangeRow(std::ostream& os,
if (range->spilled()) {
prefix = snprintf(buffer, max_prefix_length, "|%s", kind_string);
} else {
const char* reg_name;
if (range->assigned_register() == kUnassignedRegister) {
reg_name = "???";
} else {
reg_name = RegisterName(range->assigned_register());
}
prefix = snprintf(buffer, max_prefix_length, "|%s", reg_name);
prefix = snprintf(buffer, max_prefix_length, "|%s",
RegisterName(range->assigned_register()));
}
os << buffer;
position += std::min(prefix, max_prefix_length - 1);
......@@ -2071,7 +2072,8 @@ void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
while (!iterator.Done()) {
int operand_index = iterator.Current();
TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
range->AddUseInterval(start, end, allocation_zone());
range->AddUseInterval(start, end, allocation_zone(),
data()->is_trace_alloc());
iterator.Advance();
}
}
......@@ -2192,16 +2194,18 @@ UsePosition* LiveRangeBuilder::Define(LifetimePosition position,
if (range->IsEmpty() || range->Start() > position) {
// Can happen if there is a definition without use.
range->AddUseInterval(position, position.NextStart(), allocation_zone());
range->AddUsePosition(NewUsePosition(position.NextStart()));
range->AddUseInterval(position, position.NextStart(), allocation_zone(),
data()->is_trace_alloc());
range->AddUsePosition(NewUsePosition(position.NextStart()),
data()->is_trace_alloc());
} else {
range->ShortenTo(position);
range->ShortenTo(position, data()->is_trace_alloc());
}
if (!operand->IsUnallocated()) return nullptr;
UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
UsePosition* use_pos =
NewUsePosition(position, unalloc_operand, hint, hint_type);
range->AddUsePosition(use_pos);
range->AddUsePosition(use_pos, data()->is_trace_alloc());
return use_pos;
}
......@@ -2216,9 +2220,10 @@ UsePosition* LiveRangeBuilder::Use(LifetimePosition block_start,
if (operand->IsUnallocated()) {
UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
use_pos = NewUsePosition(position, unalloc_operand, hint, hint_type);
range->AddUsePosition(use_pos);
range->AddUsePosition(use_pos, data()->is_trace_alloc());
}
range->AddUseInterval(block_start, position, allocation_zone());
range->AddUseInterval(block_start, position, allocation_zone(),
data()->is_trace_alloc());
return use_pos;
}
......@@ -2279,7 +2284,7 @@ void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
int code = config()->GetAllocatableGeneralCode(i);
TopLevelLiveRange* range = FixedLiveRangeFor(code, spill_mode);
range->AddUseInterval(curr_position, curr_position.End(),
allocation_zone());
allocation_zone(), data()->is_trace_alloc());
}
}
......@@ -2291,7 +2296,7 @@ void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
TopLevelLiveRange* range = FixedFPLiveRangeFor(
code, MachineRepresentation::kFloat64, spill_mode);
range->AddUseInterval(curr_position, curr_position.End(),
allocation_zone());
allocation_zone(), data()->is_trace_alloc());
}
// Clobber fixed float registers on archs with non-simple aliasing.
if (!kSimpleFPAliasing) {
......@@ -2304,7 +2309,7 @@ void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
TopLevelLiveRange* range = FixedFPLiveRangeFor(
code, MachineRepresentation::kFloat32, spill_mode);
range->AddUseInterval(curr_position, curr_position.End(),
allocation_zone());
allocation_zone(), data()->is_trace_alloc());
}
}
if (fixed_simd128_live_ranges) {
......@@ -2314,7 +2319,7 @@ void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
TopLevelLiveRange* range = FixedFPLiveRangeFor(
code, MachineRepresentation::kSimd128, spill_mode);
range->AddUseInterval(curr_position, curr_position.End(),
allocation_zone());
allocation_zone(), data()->is_trace_alloc());
}
}
}
......@@ -2574,7 +2579,8 @@ void LiveRangeBuilder::ProcessLoopHeader(const InstructionBlock* block,
while (!iterator.Done()) {
int operand_index = iterator.Current();
TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
range->EnsureInterval(start, end, allocation_zone());
range->EnsureInterval(start, end, allocation_zone(),
data()->is_trace_alloc());
iterator.Advance();
}
// Insert all values into the live in sets of all blocks in the loop.
......@@ -2773,7 +2779,7 @@ void BundleBuilder::BuildBundles() {
LiveRangeBundle* input_bundle = input_range->get_bundle();
if (input_bundle != nullptr) {
TRACE("Merge\n");
if (out->TryMerge(input_bundle))
if (out->TryMerge(input_bundle, data()->is_trace_alloc()))
TRACE("Merged %d and %d to %d\n", phi->virtual_register(), input,
out->id());
} else {
......@@ -2798,7 +2804,7 @@ bool LiveRangeBundle::TryAddRange(LiveRange* range) {
InsertUses(range->first_interval());
return true;
}
bool LiveRangeBundle::TryMerge(LiveRangeBundle* other) {
bool LiveRangeBundle::TryMerge(LiveRangeBundle* other, bool trace_alloc) {
if (other == this) return true;
auto iter1 = uses_.begin();
......@@ -2810,8 +2816,8 @@ bool LiveRangeBundle::TryMerge(LiveRangeBundle* other) {
} else if (iter2->start > iter1->end) {
++iter1;
} else {
TRACE("No merge %d:%d %d:%d\n", iter1->start, iter1->end, iter2->start,
iter2->end);
TRACE_COND(trace_alloc, "No merge %d:%d %d:%d\n", iter1->start,
iter1->end, iter2->start, iter2->end);
return false;
}
}
......@@ -3042,6 +3048,7 @@ void RegisterAllocator::Spill(LiveRange* range, SpillMode spill_mode) {
}
const char* RegisterAllocator::RegisterName(int register_code) const {
if (register_code == kUnassignedRegister) return "unassigned";
return mode() == GENERAL_REGISTERS
? i::RegisterName(Register::from_code(register_code))
: i::RegisterName(DoubleRegister::from_code(register_code));
......@@ -3408,7 +3415,7 @@ void LinearScanAllocator::ComputeStateFromManyPredecessors(
to_be_live->emplace(val.first, reg);
TRACE("Reset %d as live due vote %zu in %s\n",
val.first->TopLevel()->vreg(), val.second.count,
reg == kUnassignedRegister ? "unassigned" : RegisterName(reg));
RegisterName(reg));
}
}
};
......@@ -3576,7 +3583,7 @@ void LinearScanAllocator::AllocateRegisters() {
SplitAndSpillRangesDefinedByMemoryOperand();
data()->ResetSpillState();
if (FLAG_trace_alloc) {
if (data()->is_trace_alloc()) {
PrintRangeOverview(std::cout);
}
......@@ -3826,7 +3833,7 @@ void LinearScanAllocator::AllocateRegisters() {
ProcessCurrentRange(current, spill_mode);
}
if (FLAG_trace_alloc) {
if (data()->is_trace_alloc()) {
PrintRangeOverview(std::cout);
}
}
......@@ -5140,6 +5147,7 @@ void LiveRangeConnector::CommitSpillsInDeferredBlocks(
}
#undef TRACE
#undef TRACE_COND
} // namespace compiler
} // namespace internal
......
......@@ -175,7 +175,8 @@ std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);
// Per-compilation flags controlling the register allocator.
// NOTE(review): the stripped diff retained both the pre-change
// "kTurboPreprocessRanges = 1 << 1" line (no trailing comma) and the
// post-change list, producing a duplicate enumerator, which is ill-formed;
// only the post-change enumerator list is kept here.
enum class RegisterAllocationFlag : unsigned {
  kTurboControlFlowAwareAllocation = 1 << 0,
  kTurboPreprocessRanges = 1 << 1,
  kTraceAllocation = 1 << 2
};
using RegisterAllocationFlags = base::Flags<RegisterAllocationFlag>;
......@@ -198,6 +199,10 @@ class RegisterAllocationData final : public ZoneObject {
return flags_ & RegisterAllocationFlag::kTurboPreprocessRanges;
}
bool is_trace_alloc() {
return flags_ & RegisterAllocationFlag::kTraceAllocation;
}
static constexpr int kNumberOfFixedRangesPerRegister = 2;
class PhiMapValue : public ZoneObject {
......@@ -741,7 +746,7 @@ class LiveRangeBundle : public ZoneObject {
: ranges_(zone), uses_(zone), id_(id) {}
bool TryAddRange(LiveRange* range);
bool TryMerge(LiveRangeBundle* other);
bool TryMerge(LiveRangeBundle* other, bool trace_alloc);
ZoneSet<LiveRange*, LiveRangeOrdering> ranges_;
ZoneSet<Range, RangeOrdering> uses_;
......@@ -785,12 +790,14 @@ class V8_EXPORT_PRIVATE TopLevelLiveRange final : public LiveRange {
SlotUseKind slot_use_kind() const { return HasSlotUseField::decode(bits_); }
// Add a new interval or a new use position to this live range.
void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
void AddUsePosition(UsePosition* pos);
void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone,
bool trace_alloc);
void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone,
bool trace_alloc);
void AddUsePosition(UsePosition* pos, bool trace_alloc);
// Shorten the most recently added interval by setting a new start.
void ShortenTo(LifetimePosition start);
void ShortenTo(LifetimePosition start, bool trace_alloc);
// Detaches between start and end, and attributes the resulting range to
// result.
......
......@@ -3021,6 +3021,9 @@ void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config,
if (data->info()->is_turbo_preprocess_ranges()) {
flags |= RegisterAllocationFlag::kTurboPreprocessRanges;
}
if (data->info()->trace_turbo_allocation_enabled()) {
flags |= RegisterAllocationFlag::kTraceAllocation;
}
data->InitializeRegisterAllocationData(config, call_descriptor, flags);
if (info()->is_osr()) data->osr_helper()->SetupFrame(data->frame());
......
......@@ -485,7 +485,7 @@ DEFINE_BOOL(trace_turbo_trimming, false, "trace TurboFan's graph trimmer")
DEFINE_BOOL(trace_turbo_jt, false, "trace TurboFan's jump threading")
DEFINE_BOOL(trace_turbo_ceq, false, "trace TurboFan's control equivalence")
DEFINE_BOOL(trace_turbo_loop, false, "trace TurboFan's loop optimizations")
DEFINE_BOOL(trace_alloc, false, "trace register allocator")
DEFINE_BOOL(trace_turbo_alloc, false, "trace TurboFan's register allocator")
DEFINE_BOOL(trace_all_uses, false, "trace all use positions")
DEFINE_BOOL(trace_representation, false, "trace representation types")
DEFINE_BOOL(turbo_verify, DEBUG_BOOL, "verify TurboFan graphs at each phase")
......
......@@ -56,13 +56,13 @@ class TestRangeBuilder {
LifetimePosition start = LifetimePosition::FromInt(pair.first);
LifetimePosition end = LifetimePosition::FromInt(pair.second);
CHECK(start < end);
range->AddUseInterval(start, end, zone_);
range->AddUseInterval(start, end, zone_, FLAG_trace_turbo_alloc);
}
for (int pos : uses_) {
UsePosition* use_position =
new (zone_) UsePosition(LifetimePosition::FromInt(pos), nullptr,
nullptr, UsePositionHintType::kNone);
range->AddUsePosition(use_position);
range->AddUsePosition(use_position, FLAG_trace_turbo_alloc);
}
pairs_.clear();
......@@ -129,10 +129,10 @@ TEST_F(LiveRangeUnitTest, InvalidConstruction) {
// Build a range manually, because the builder guards against empty cases.
TopLevelLiveRange* range =
new (zone()) TopLevelLiveRange(1, MachineRepresentation::kTagged);
V8_ASSERT_DEBUG_DEATH(
range->AddUseInterval(LifetimePosition::FromInt(0),
LifetimePosition::FromInt(0), zone()),
".*");
V8_ASSERT_DEBUG_DEATH(range->AddUseInterval(LifetimePosition::FromInt(0),
LifetimePosition::FromInt(0),
zone(), FLAG_trace_turbo_alloc),
".*");
}
TEST_F(LiveRangeUnitTest, SplitInvalidStart) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment