Commit 13781060 authored by Sigurd Schneider, committed by Commit Bot

[regalloc] Rename --trace-alloc to --trace-turbo-alloc

And make --trace-turbo-alloc honor --trace-turbo-filter

This is useful for filtering out a specific compile job: if mksnapshot is
crashing, for example, an unfiltered run easily produces 5 GB of logs.
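For example, passing --trace-turbo-alloc together with --trace-turbo-filter=<pattern>
to d8 or mksnapshot should now emit the allocator trace only for compile jobs
matching the filter, instead of for every compilation.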

TBR=bmeurer@chromium.org

Change-Id: Ic7dea0a4cef793b517d98ca2ba1f6ea6eeac63ea
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1521111
Commit-Queue: Sigurd Schneider <sigurds@chromium.org>
Reviewed-by: Georg Neis <neis@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/master@{#62536}
parent e9d93bab
@@ -242,6 +242,7 @@ void OptimizedCompilationInfo::SetTracingFlags(bool passes_filter) {
   if (FLAG_trace_turbo) SetFlag(kTraceTurboJson);
   if (FLAG_trace_turbo_graph) SetFlag(kTraceTurboGraph);
   if (FLAG_trace_turbo_scheduled) SetFlag(kTraceTurboScheduled);
+  if (FLAG_trace_turbo_alloc) SetFlag(kTraceTurboAllocation);
   if (FLAG_trace_heap_broker) SetFlag(kTraceHeapBroker);
 }
......
@@ -60,10 +60,11 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
     kTraceTurboJson = 1 << 14,
     kTraceTurboGraph = 1 << 15,
     kTraceTurboScheduled = 1 << 16,
-    kTraceHeapBroker = 1 << 17,
-    kWasmRuntimeExceptionSupport = 1 << 18,
-    kTurboControlFlowAwareAllocation = 1 << 19,
-    kTurboPreprocessRanges = 1 << 20
+    kTraceTurboAllocation = 1 << 17,
+    kTraceHeapBroker = 1 << 18,
+    kWasmRuntimeExceptionSupport = 1 << 19,
+    kTurboControlFlowAwareAllocation = 1 << 20,
+    kTurboPreprocessRanges = 1 << 21
   };

   // Construct a compilation info for optimized compilation.
@@ -190,6 +191,10 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
   bool trace_turbo_graph_enabled() const { return GetFlag(kTraceTurboGraph); }

+  bool trace_turbo_allocation_enabled() const {
+    return GetFlag(kTraceTurboAllocation);
+  }
+
   bool trace_turbo_scheduled_enabled() const {
     return GetFlag(kTraceTurboScheduled);
   }
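A rough standalone sketch of the mechanism above (illustrative only, not V8's
actual code: the sketch namespace, CompilationInfoModel, the FLAG_ variable and
GetFlag/SetFlag are simplified stand-ins, and the early return on passes_filter
is an assumption, since that check sits outside the hunk shown):

#include <cstdint>

namespace sketch {

bool FLAG_trace_turbo_alloc = false;  // stand-in for V8's --trace-turbo-alloc

class CompilationInfoModel {
 public:
  // Inserting kTraceTurboAllocation at 1 << 17 is what shifts the later
  // enumerators (kTraceHeapBroker and friends) up by one bit in the diff.
  enum Flag : uint32_t { kTraceTurboAllocation = 1u << 17 };

  // Assumed behaviour: when the compile job does not pass
  // --trace-turbo-filter, no tracing flag is set at all, so per-phase
  // tracing is implicitly filtered along with it.
  void SetTracingFlags(bool passes_filter) {
    if (!passes_filter) return;
    if (FLAG_trace_turbo_alloc) SetFlag(kTraceTurboAllocation);
  }

  bool trace_turbo_allocation_enabled() const {
    return GetFlag(kTraceTurboAllocation);
  }

 private:
  void SetFlag(Flag flag) { flags_ |= flag; }
  bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }

  uint32_t flags_ = 0;
};

}  // namespace sketch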
......
@@ -9,15 +9,16 @@ namespace v8 {
 namespace internal {
 namespace compiler {

-#define TRACE(...)                             \
-  do {                                         \
-    if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
+#define TRACE_COND(cond, ...)      \
+  do {                             \
+    if (cond) PrintF(__VA_ARGS__); \
   } while (false)

 namespace {

 void CreateSplinter(TopLevelLiveRange* range, RegisterAllocationData* data,
-                    LifetimePosition first_cut, LifetimePosition last_cut) {
+                    LifetimePosition first_cut, LifetimePosition last_cut,
+                    bool trace_alloc) {
   DCHECK(!range->IsSplinter());
   // We can ignore ranges that live solely in deferred blocks.
   // If a range ends right at the end of a deferred block, it is marked by
@@ -49,9 +50,10 @@ void CreateSplinter(TopLevelLiveRange* range, RegisterAllocationData* data,
       range->SetSplinter(splinter);
     }
     Zone* zone = data->allocation_zone();
-    TRACE("creating splinter %d for range %d between %d and %d\n",
-          range->splinter()->vreg(), range->vreg(), start.ToInstructionIndex(),
-          end.ToInstructionIndex());
+    TRACE_COND(trace_alloc,
+               "creating splinter %d for range %d between %d and %d\n",
+               range->splinter()->vreg(), range->vreg(),
+               start.ToInstructionIndex(), end.ToInstructionIndex());
     range->Splinter(start, end, zone);
   }
 }
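A minimal self-contained demonstration of the TRACE_COND pattern above. The
macro body is taken from the diff; PrintF is V8-internal, so plain printf
stands in here, and SplinterLikeStep is a made-up example function:

#include <cstdio>

#define TRACE_COND(cond, ...)      \
  do {                             \
    if (cond) printf(__VA_ARGS__); \
  } while (false)

// The tracing decision is made once per compilation and threaded through as
// a plain bool, instead of each call site consulting a global FLAG.
void SplinterLikeStep(int vreg, bool trace_alloc) {
  TRACE_COND(trace_alloc, "processing vreg %d\n", vreg);
}

int main() {
  SplinterLikeStep(7, true);   // prints
  SplinterLikeStep(8, false);  // stays silent
  return 0;
}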
@@ -102,7 +104,8 @@ void SplinterLiveRange(TopLevelLiveRange* range, RegisterAllocationData* data) {
             current_block->last_instruction_index());
       } else {
         if (first_cut.IsValid()) {
-          CreateSplinter(range, data, first_cut, last_cut);
+          CreateSplinter(range, data, first_cut, last_cut,
+                         data->is_trace_alloc());
           first_cut = LifetimePosition::Invalid();
           last_cut = LifetimePosition::Invalid();
         }
@@ -116,7 +119,8 @@ void SplinterLiveRange(TopLevelLiveRange* range, RegisterAllocationData* data) {
     // have to connect blocks anyway, so we can also splinter to the end of the
     // block, too.
     if (first_cut.IsValid()) {
-      CreateSplinter(range, data, first_cut, interval_end);
+      CreateSplinter(range, data, first_cut, interval_end,
+                     data->is_trace_alloc());
       first_cut = LifetimePosition::Invalid();
       last_cut = LifetimePosition::Invalid();
     }
@@ -186,7 +190,7 @@ void LiveRangeMerger::Merge() {
   }
 }

-#undef TRACE
+#undef TRACE_COND

 }  // namespace compiler
 }  // namespace internal
......
@@ -175,7 +175,8 @@ std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);
 enum class RegisterAllocationFlag : unsigned {
   kTurboControlFlowAwareAllocation = 1 << 0,
-  kTurboPreprocessRanges = 1 << 1
+  kTurboPreprocessRanges = 1 << 1,
+  kTraceAllocation = 1 << 2
 };

 using RegisterAllocationFlags = base::Flags<RegisterAllocationFlag>;
@@ -198,6 +199,10 @@ class RegisterAllocationData final : public ZoneObject {
     return flags_ & RegisterAllocationFlag::kTurboPreprocessRanges;
   }

+  bool is_trace_alloc() {
+    return flags_ & RegisterAllocationFlag::kTraceAllocation;
+  }
+
   static constexpr int kNumberOfFixedRangesPerRegister = 2;

   class PhiMapValue : public ZoneObject {
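RegisterAllocationFlags is base::Flags<RegisterAllocationFlag> in V8; the sketch
below uses a hand-rolled stand-in for that wrapper (the sketch namespace,
RegisterAllocationFlags class and RegisterAllocationDataModel are illustrative,
not V8's implementation), just to show how the is_trace_alloc() test over an
enum-class bit works:

namespace sketch {

enum class RegisterAllocationFlag : unsigned {
  kTurboControlFlowAwareAllocation = 1 << 0,
  kTurboPreprocessRanges = 1 << 1,
  kTraceAllocation = 1 << 2
};

// Minimal stand-in for base::Flags<RegisterAllocationFlag>: stores the bits
// in the underlying unsigned and supports |= and a boolean & test.
class RegisterAllocationFlags {
 public:
  RegisterAllocationFlags& operator|=(RegisterAllocationFlag f) {
    bits_ |= static_cast<unsigned>(f);
    return *this;
  }
  bool operator&(RegisterAllocationFlag f) const {
    return (bits_ & static_cast<unsigned>(f)) != 0;
  }

 private:
  unsigned bits_ = 0;
};

// Mirrors the accessor added in the diff.
struct RegisterAllocationDataModel {
  RegisterAllocationFlags flags_;
  bool is_trace_alloc() const {
    return flags_ & RegisterAllocationFlag::kTraceAllocation;
  }
};

}  // namespace sketch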
@@ -741,7 +746,7 @@ class LiveRangeBundle : public ZoneObject {
       : ranges_(zone), uses_(zone), id_(id) {}

   bool TryAddRange(LiveRange* range);
-  bool TryMerge(LiveRangeBundle* other);
+  bool TryMerge(LiveRangeBundle* other, bool trace_alloc);

   ZoneSet<LiveRange*, LiveRangeOrdering> ranges_;
   ZoneSet<Range, RangeOrdering> uses_;
@@ -785,12 +790,14 @@ class V8_EXPORT_PRIVATE TopLevelLiveRange final : public LiveRange {
   SlotUseKind slot_use_kind() const { return HasSlotUseField::decode(bits_); }

   // Add a new interval or a new use position to this live range.
-  void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
-  void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
-  void AddUsePosition(UsePosition* pos);
+  void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone,
+                      bool trace_alloc);
+  void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone,
+                      bool trace_alloc);
+  void AddUsePosition(UsePosition* pos, bool trace_alloc);

   // Shorten the most recently added interval by setting a new start.
-  void ShortenTo(LifetimePosition start);
+  void ShortenTo(LifetimePosition start, bool trace_alloc);

   // Detaches between start and end, and attributes the resulting range to
   // result.
......
@@ -3021,6 +3021,9 @@ void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config,
   if (data->info()->is_turbo_preprocess_ranges()) {
     flags |= RegisterAllocationFlag::kTurboPreprocessRanges;
   }
+  if (data->info()->trace_turbo_allocation_enabled()) {
+    flags |= RegisterAllocationFlag::kTraceAllocation;
+  }
   data->InitializeRegisterAllocationData(config, call_descriptor, flags);

   if (info()->is_osr()) data->osr_helper()->SetupFrame(data->frame());
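This is the hop that makes the allocator trace honor --trace-turbo-filter: the
register allocator's kTraceAllocation flag is derived from the per-compilation
OptimizedCompilationInfo flag (which, per the commit message, is only set for
jobs that pass the filter) rather than read from the global FLAG_trace_turbo_alloc.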
......
@@ -485,7 +485,7 @@ DEFINE_BOOL(trace_turbo_trimming, false, "trace TurboFan's graph trimmer")
 DEFINE_BOOL(trace_turbo_jt, false, "trace TurboFan's jump threading")
 DEFINE_BOOL(trace_turbo_ceq, false, "trace TurboFan's control equivalence")
 DEFINE_BOOL(trace_turbo_loop, false, "trace TurboFan's loop optimizations")
-DEFINE_BOOL(trace_alloc, false, "trace register allocator")
+DEFINE_BOOL(trace_turbo_alloc, false, "trace TurboFan's register allocator")
 DEFINE_BOOL(trace_all_uses, false, "trace all use positions")
 DEFINE_BOOL(trace_representation, false, "trace representation types")
 DEFINE_BOOL(turbo_verify, DEBUG_BOOL, "verify TurboFan graphs at each phase")
......
@@ -56,13 +56,13 @@ class TestRangeBuilder {
       LifetimePosition start = LifetimePosition::FromInt(pair.first);
       LifetimePosition end = LifetimePosition::FromInt(pair.second);
       CHECK(start < end);
-      range->AddUseInterval(start, end, zone_);
+      range->AddUseInterval(start, end, zone_, FLAG_trace_turbo_alloc);
     }
     for (int pos : uses_) {
       UsePosition* use_position =
           new (zone_) UsePosition(LifetimePosition::FromInt(pos), nullptr,
                                   nullptr, UsePositionHintType::kNone);
-      range->AddUsePosition(use_position);
+      range->AddUsePosition(use_position, FLAG_trace_turbo_alloc);
     }
     pairs_.clear();
@@ -129,10 +129,10 @@ TEST_F(LiveRangeUnitTest, InvalidConstruction) {
   // Build a range manually, because the builder guards against empty cases.
   TopLevelLiveRange* range =
       new (zone()) TopLevelLiveRange(1, MachineRepresentation::kTagged);
-  V8_ASSERT_DEBUG_DEATH(
-      range->AddUseInterval(LifetimePosition::FromInt(0),
-                            LifetimePosition::FromInt(0), zone()),
-      ".*");
+  V8_ASSERT_DEBUG_DEATH(range->AddUseInterval(LifetimePosition::FromInt(0),
+                                              LifetimePosition::FromInt(0),
+                                              zone(), FLAG_trace_turbo_alloc),
+                        ".*");
 }

 TEST_F(LiveRangeUnitTest, SplitInvalidStart) {
......
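Because the live-range helpers now take the tracing decision as an explicit
parameter instead of reading a global flag inside the TRACE macro, the unit
tests pass FLAG_trace_turbo_alloc through at each call site.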