Commit e9d4f79f authored by Clemens Backes, committed by V8 LUCI CQ

[logging] Use v8_flags for accessing flag values

Avoid the deprecated FLAG_* syntax; access flag values via the
{v8_flags} struct instead.

R=mliedtke@chromium.org

Bug: v8:12887
Change-Id: I36c66465e3b6c1b27c1825e50f17f4bc8557c426
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3898936
Reviewed-by: Matthias Liedtke <mliedtke@chromium.org>
Commit-Queue: Clemens Backes <clemensb@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83217}
parent 5f2ed43a
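
The pattern being adopted: instead of one global variable per flag (FLAG_log, FLAG_histogram_interval, ...), all flag values live in a single struct instance, v8_flags, and call sites read its members. A minimal standalone sketch of that shape, with hypothetical flag names and defaults rather than V8's actual declarations:

  #include <iostream>

  // Sketch only: hypothetical flags struct, not V8's definition.
  struct FlagValues {
    bool log = false;
    int histogram_interval = 10;   // hypothetical default, for illustration
    bool slow_histograms = false;
  };

  // A single global instance, analogous to v8::internal::v8_flags.
  FlagValues v8_flags;

  int main() {
    // Old call sites read globals such as FLAG_histogram_interval;
    // new call sites read the corresponding struct member instead.
    double sample_interval_ms = v8_flags.histogram_interval;
    if (v8_flags.log) std::cout << "logging enabled\n";
    std::cout << "interval: " << sample_interval_ms << " ms\n";
  }

One practical effect of the struct form is that every access is spelled v8_flags.<name>, so flag reads are easy to grep and the complete set of flags is visible in one place.
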
......@@ -367,7 +367,7 @@ class V8_NODISCARD AggregatedHistogramTimerScope {
// AggregatedMemoryHistogram collects (time, value) sample pairs and turns
// them into time-uniform samples for the backing histogram, such that the
// backing histogram receives one sample every T ms, where T is controlled
// by the FLAG_histogram_interval.
// by the v8_flags.histogram_interval.
//
// More formally: let F be a real-valued function that maps time to sample
// values. We define F as a linear interpolation between adjacent samples. For
......@@ -388,7 +388,7 @@ class AggregatedMemoryHistogram {
// 1) We have processed the samples that came in before start_ms_ and sent the
// corresponding aggregated samples to the backing histogram.
// 2) (last_ms_, last_value_) is the last received sample.
// 3) last_ms_ < start_ms_ + FLAG_histogram_interval.
// 3) last_ms_ < start_ms_ + v8_flags.histogram_interval.
// 4) aggregate_value_ is the average of the function that is constructed by
// linearly interpolating samples received between start_ms_ and last_ms_.
void AddSample(double current_ms, double current_value);
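
To make the invariants above concrete, here is a hedged, self-contained sketch of time-uniform aggregation by linear interpolation. It is deliberately simplified relative to V8's AggregatedMemoryHistogram (no histogram backing, no epsilon handling), and all names are illustrative:

  #include <vector>

  // Hedged sketch, not V8's implementation: aggregate (time, value) samples
  // into one time-averaged value per interval of length interval_ms, where
  // F(t) is the linear interpolation between adjacent samples.
  class TimeUniformAggregator {
   public:
    explicit TimeUniformAggregator(double interval_ms)
        : interval_ms_(interval_ms) {}

    void AddSample(double t, double v) {
      if (!started_) {  // first sample opens the first interval
        started_ = true;
        start_ms_ = last_ms_ = t;
        last_value_ = v;
        return;
      }
      if (t <= last_ms_) {  // duplicate timestamp: keep the latest value
        last_value_ = v;
        return;
      }
      double end_ms = start_ms_ + interval_ms_;
      while (end_ms <= t) {
        // F at the interval boundary, interpolated on [last_ms_, t].
        double vb = last_value_ +
                    (v - last_value_) * (end_ms - last_ms_) / (t - last_ms_);
        Fold(end_ms, vb);            // complete the current interval...
        out_.push_back(aggregate_);  // ...and emit its time average
        start_ms_ = end_ms;
        aggregate_ = 0;
        end_ms += interval_ms_;
      }
      if (t > last_ms_) Fold(t, v);  // partial piece of the open interval
    }

    const std::vector<double>& samples() const { return out_; }

   private:
    // Merge the linear piece [last_ms_, t] (average = midpoint of endpoint
    // values) into the running average over [start_ms_, t].
    void Fold(double t, double v) {
      double covered = last_ms_ - start_ms_;
      double piece = t - last_ms_;
      aggregate_ = (aggregate_ * covered + (last_value_ + v) / 2 * piece) /
                   (covered + piece);
      last_ms_ = t;
      last_value_ = v;
    }

    double interval_ms_;
    bool started_ = false;
    double start_ms_ = 0, last_ms_ = 0, last_value_ = 0, aggregate_ = 0;
    std::vector<double> out_;
  };

For example, with interval_ms = 10, feeding (10, 500), (15, 750), (20, 1000) produces a single aggregated value of 750, the time average of the linear ramp from 500 to 1000 over [10, 20].
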
......@@ -429,7 +429,7 @@ void AggregatedMemoryHistogram<Histogram>::AddSample(double current_ms,
// Two samples have the same time, remember the last one.
last_value_ = current_value;
} else {
double sample_interval_ms = FLAG_histogram_interval;
double sample_interval_ms = v8_flags.histogram_interval;
double end_ms = start_ms_ + sample_interval_ms;
if (end_ms <= current_ms + kEpsilon) {
// Linearly interpolate between the last_ms_ and the current_ms.
......@@ -520,10 +520,10 @@ class Counters : public std::enable_shared_from_this<Counters> {
NESTED_TIMED_HISTOGRAM_LIST(HT)
#undef HT
#define HT(name, caption, max, res) \
NestedTimedHistogram* name() { \
name##_.EnsureCreated(FLAG_slow_histograms); \
return &name##_; \
#define HT(name, caption, max, res) \
NestedTimedHistogram* name() { \
name##_.EnsureCreated(v8_flags.slow_histograms); \
return &name##_; \
}
NESTED_TIMED_HISTOGRAM_LIST_SLOW(HT)
#undef HT
......
......@@ -28,7 +28,7 @@ const char* const LogFile::kLogToConsole = "-";
// static
FILE* LogFile::CreateOutputHandle(std::string file_name) {
// If we're logging anything, we need to open the log file.
if (!FLAG_log) {
if (!v8_flags.log) {
return nullptr;
} else if (LogFile::IsLoggingToConsole(file_name)) {
return stdout;
......
......@@ -70,9 +70,9 @@ class SourcePosition;
class Ticker;
#undef LOG
#define LOG(isolate, Call) \
do { \
if (v8::internal::FLAG_log) (isolate)->v8_file_logger()->Call; \
#define LOG(isolate, Call) \
do { \
if (v8::internal::v8_flags.log) (isolate)->v8_file_logger()->Call; \
} while (false)
#define LOG_CODE_EVENT(isolate, Call) \
......@@ -299,7 +299,7 @@ class V8FileLogger : public LogEventListener {
void TickEvent(TickSample* sample, bool overflow);
void RuntimeCallTimerEvent();
// Logs a StringEvent regardless of whether FLAG_log is true.
// Logs a StringEvent regardless of whether v8_flags.log is true.
void UncheckedStringEvent(const char* name, const char* value);
// Logs a script's sources. Keeps track of all logged scripts to ensure that
......
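
The LOG macro above is the usual statement-like macro shape: the do { ... } while (false) wrapper lets the expansion behave as a single statement inside if/else, and the v8_flags.log check skips the logger call entirely when logging is disabled. A minimal standalone sketch of the same shape, with a hypothetical logger rather than V8FileLogger:

  #include <iostream>
  #include <string>

  // Sketch only: hypothetical flags struct, isolate and logger types.
  struct FlagValues { bool log = false; } v8_flags;

  struct FileLogger {
    void StringEvent(const std::string& name, const std::string& value) {
      std::cout << name << "," << value << "\n";
    }
  };

  struct Isolate {
    FileLogger* file_logger() { return &logger_; }
    FileLogger logger_;
  };

  // Expands to exactly one statement; skips the call when the flag is off.
  #define LOG(isolate, Call) \
    do { \
      if (v8_flags.log) (isolate)->file_logger()->Call; \
    } while (false)

  int main() {
    Isolate isolate;
    v8_flags.log = true;
    LOG(&isolate, StringEvent("profiler", "begin"));
  }
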
......@@ -157,7 +157,7 @@ RuntimeCallStats::RuntimeCallStats(ThreadType thread_type)
for (int i = 0; i < kNumberOfCounters; i++) {
this->counters_[i] = RuntimeCallCounter(kNames[i]);
}
if (FLAG_rcs_cpu_time) {
if (v8_flags.rcs_cpu_time) {
CHECK(base::ThreadTicks::IsSupported());
base::ThreadTicks::WaitUntilInitialized();
RuntimeCallTimer::Now = &RuntimeCallTimer::NowCPUTime;
......
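
In the hunk above, enabling v8_flags.rcs_cpu_time redirects all runtime-call-stats timing through a different clock by reassigning a function pointer. A hedged sketch of that pattern, with stand-in clock functions instead of base::ThreadTicks:

  #include <chrono>
  #include <ctime>
  #include <iostream>

  // Sketch only: swapping the clock source through a function pointer.
  using TimeNowFn = double (*)();

  double WallClockNowMs() {
    using namespace std::chrono;
    return duration<double, std::milli>(
               steady_clock::now().time_since_epoch())
        .count();
  }

  double CpuTimeNowMs() {
    // Stand-in for a per-thread CPU clock (base::ThreadTicks in V8).
    return static_cast<double>(std::clock()) * 1000.0 / CLOCKS_PER_SEC;
  }

  struct RuntimeCallTimerSketch {
    static TimeNowFn Now;  // all timing in the sketch goes through this
  };
  TimeNowFn RuntimeCallTimerSketch::Now = &WallClockNowMs;

  int main() {
    bool rcs_cpu_time = true;  // imagine reading v8_flags.rcs_cpu_time
    if (rcs_cpu_time) RuntimeCallTimerSketch::Now = &CpuTimeNowMs;
    std::cout << "now: " << RuntimeCallTimerSketch::Now() << " ms\n";
  }
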
......@@ -13,8 +13,8 @@ namespace v8 {
namespace internal {
// This struct contains a set of flags that can be modified from multiple
// threads at runtime unlike the normal FLAG_-like flags which are not modified
// after V8 instance is initialized.
// threads at runtime unlike the normal v8_flags.-like flags which are not
// modified after V8 instance is initialized.
struct TracingFlags {
static V8_EXPORT_PRIVATE std::atomic_uint runtime_stats;
......
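
The comment above draws the key distinction: regular v8_flags members are set during initialization and then only read, so plain fields suffice, while TracingFlags members can be flipped from other threads at runtime and are therefore atomics. A hedged sketch of that split (hypothetical members; only is_runtime_stats_enabled echoes a name used in the tests below):

  #include <atomic>
  #include <thread>

  // Sketch only. Ordinary flags: written during initialization, then only
  // read, so plain members are fine.
  struct FlagValues {
    bool runtime_call_stats = false;
  };
  FlagValues v8_flags;

  // Runtime-togglable flags: read and written concurrently, so each one is
  // an atomic accessed with explicit loads and stores.
  struct TracingFlags {
    static std::atomic_uint runtime_stats;
    static bool is_runtime_stats_enabled() {
      return runtime_stats.load(std::memory_order_relaxed) != 0;
    }
  };
  std::atomic_uint TracingFlags::runtime_stats{0};

  int main() {
    std::thread toggler(
        [] { TracingFlags::runtime_stats.store(1, std::memory_order_relaxed); });
    while (!TracingFlags::is_runtime_stats_enabled()) std::this_thread::yield();
    toggler.join();
  }
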
......@@ -75,7 +75,7 @@ class SnapshotNativeCounterTest : public TestWithNativeContextAndCounters {
} // namespace
TEST_F(AggregatedMemoryHistogramTest, OneSample1) {
FLAG_histogram_interval = 10;
v8_flags.histogram_interval = 10;
AddSample(10, 1000);
AddSample(20, 1000);
EXPECT_EQ(1U, samples()->size());
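
As a worked instance of the aggregation model above: with histogram_interval = 10, the two samples at t = 10 and t = 20 span exactly one interval, F(t) is constant at 1000 on [10, 20], and the time average of F over that interval is 1000, which is why exactly one aggregated sample is expected here.
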
......@@ -83,7 +83,7 @@ TEST_F(AggregatedMemoryHistogramTest, OneSample1) {
}
TEST_F(AggregatedMemoryHistogramTest, OneSample2) {
FLAG_histogram_interval = 10;
v8_flags.histogram_interval = 10;
AddSample(10, 500);
AddSample(20, 1000);
EXPECT_EQ(1U, samples()->size());
......@@ -91,7 +91,7 @@ TEST_F(AggregatedMemoryHistogramTest, OneSample2) {
}
TEST_F(AggregatedMemoryHistogramTest, OneSample3) {
FLAG_histogram_interval = 10;
v8_flags.histogram_interval = 10;
AddSample(10, 500);
AddSample(15, 500);
AddSample(15, 1000);
......@@ -101,7 +101,7 @@ TEST_F(AggregatedMemoryHistogramTest, OneSample3) {
}
TEST_F(AggregatedMemoryHistogramTest, OneSample4) {
FLAG_histogram_interval = 10;
v8_flags.histogram_interval = 10;
AddSample(10, 500);
AddSample(15, 750);
AddSample(20, 1000);
......@@ -110,7 +110,7 @@ TEST_F(AggregatedMemoryHistogramTest, OneSample4) {
}
TEST_F(AggregatedMemoryHistogramTest, TwoSamples1) {
FLAG_histogram_interval = 10;
v8_flags.histogram_interval = 10;
AddSample(10, 1000);
AddSample(30, 1000);
EXPECT_EQ(2U, samples()->size());
......@@ -119,7 +119,7 @@ TEST_F(AggregatedMemoryHistogramTest, TwoSamples1) {
}
TEST_F(AggregatedMemoryHistogramTest, TwoSamples2) {
FLAG_histogram_interval = 10;
v8_flags.histogram_interval = 10;
AddSample(10, 1000);
AddSample(20, 1000);
AddSample(30, 1000);
......@@ -129,7 +129,7 @@ TEST_F(AggregatedMemoryHistogramTest, TwoSamples2) {
}
TEST_F(AggregatedMemoryHistogramTest, TwoSamples3) {
FLAG_histogram_interval = 10;
v8_flags.histogram_interval = 10;
AddSample(10, 1000);
AddSample(20, 1000);
AddSample(20, 500);
......@@ -140,7 +140,7 @@ TEST_F(AggregatedMemoryHistogramTest, TwoSamples3) {
}
TEST_F(AggregatedMemoryHistogramTest, TwoSamples4) {
FLAG_histogram_interval = 10;
v8_flags.histogram_interval = 10;
AddSample(10, 1000);
AddSample(30, 0);
EXPECT_EQ(2U, samples()->size());
......@@ -149,7 +149,7 @@ TEST_F(AggregatedMemoryHistogramTest, TwoSamples4) {
}
TEST_F(AggregatedMemoryHistogramTest, TwoSamples5) {
FLAG_histogram_interval = 10;
v8_flags.histogram_interval = 10;
AddSample(10, 0);
AddSample(30, 1000);
EXPECT_EQ(2U, samples()->size());
......@@ -158,7 +158,7 @@ TEST_F(AggregatedMemoryHistogramTest, TwoSamples5) {
}
TEST_F(AggregatedMemoryHistogramTest, TwoSamples6) {
FLAG_histogram_interval = 10;
v8_flags.histogram_interval = 10;
AddSample(10, 0);
AddSample(15, 1000);
AddSample(30, 1000);
......@@ -168,7 +168,7 @@ TEST_F(AggregatedMemoryHistogramTest, TwoSamples6) {
}
TEST_F(AggregatedMemoryHistogramTest, TwoSamples7) {
FLAG_histogram_interval = 10;
v8_flags.histogram_interval = 10;
AddSample(10, 0);
AddSample(15, 1000);
AddSample(25, 0);
......@@ -179,7 +179,7 @@ TEST_F(AggregatedMemoryHistogramTest, TwoSamples7) {
}
TEST_F(AggregatedMemoryHistogramTest, TwoSamples8) {
FLAG_histogram_interval = 10;
v8_flags.histogram_interval = 10;
AddSample(10, 1000);
AddSample(15, 0);
AddSample(25, 1000);
......@@ -190,7 +190,7 @@ TEST_F(AggregatedMemoryHistogramTest, TwoSamples8) {
}
TEST_F(AggregatedMemoryHistogramTest, ManySamples1) {
FLAG_histogram_interval = 10;
v8_flags.histogram_interval = 10;
const int kMaxSamples = 1000;
AddSample(0, 0);
AddSample(10 * kMaxSamples, 10 * kMaxSamples);
......@@ -201,7 +201,7 @@ TEST_F(AggregatedMemoryHistogramTest, ManySamples1) {
}
TEST_F(AggregatedMemoryHistogramTest, ManySamples2) {
FLAG_histogram_interval = 10;
v8_flags.histogram_interval = 10;
const int kMaxSamples = 1000;
AddSample(0, 0);
AddSample(10 * (2 * kMaxSamples), 10 * (2 * kMaxSamples));
......@@ -214,7 +214,7 @@ TEST_F(AggregatedMemoryHistogramTest, ManySamples2) {
TEST_F(SnapshotNativeCounterTest, WriteBarrier) {
RunJS("let o = {a: 42};");
if (!FLAG_single_generation && SupportsNativeCounters()) {
if (!v8_flags.single_generation && SupportsNativeCounters()) {
EXPECT_NE(0, write_barriers());
} else {
EXPECT_EQ(0, write_barriers());
......
......@@ -57,11 +57,11 @@ namespace {
class LogTest : public TestWithIsolate {
public:
static void SetUpTestSuite() {
i::FLAG_log = true;
i::FLAG_prof = true;
i::FLAG_log_code = true;
i::FLAG_logfile = i::LogFile::kLogToTemporaryFile;
i::FLAG_logfile_per_isolate = false;
i::v8_flags.log = true;
i::v8_flags.prof = true;
i::v8_flags.log_code = true;
i::v8_flags.logfile = i::LogFile::kLogToTemporaryFile;
i::v8_flags.logfile_per_isolate = false;
TestWithIsolate::SetUpTestSuite();
}
};
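
Each fixture in this file follows the same pattern: a static SetUpTestSuite() assigns the relevant v8_flags members first and only then calls the base class, so the flags already have their final values when the shared isolate (and with it the logger) is created. A hedged gtest-style sketch of that shape, with hypothetical fixture and flag plumbing:

  #include <gtest/gtest.h>

  // Sketch of the fixture pattern above, with hypothetical stand-ins for
  // v8_flags and the V8 unittest base class.
  struct FlagValues { bool log = false; bool prof = false; } v8_flags;

  class TestWithIsolate : public ::testing::Test {
   public:
    static void SetUpTestSuite() {
      // In V8 this is roughly where the shared isolate gets created, so any
      // flag that influences isolate setup must already have its final value.
    }
  };

  class LogTest : public TestWithIsolate {
   public:
    static void SetUpTestSuite() {
      v8_flags.log = true;                // set flags first...
      v8_flags.prof = true;
      TestWithIsolate::SetUpTestSuite();  // ...then delegate to the base
    }
  };

  TEST_F(LogTest, FlagsAreInPlaceBeforeTheSuiteRuns) {
    EXPECT_TRUE(v8_flags.log);
    EXPECT_TRUE(v8_flags.prof);
  }
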
......@@ -498,11 +498,11 @@ TEST_F(LogTest, Issue539892) {
class LogAllTest : public LogTest {
public:
static void SetUpTestSuite() {
i::FLAG_log_all = true;
i::FLAG_log_deopt = true;
i::FLAG_turbo_inlining = false;
i::FLAG_log_internal_timer_events = true;
i::FLAG_allow_natives_syntax = true;
i::v8_flags.log_all = true;
i::v8_flags.log_deopt = true;
i::v8_flags.turbo_inlining = false;
i::v8_flags.log_internal_timer_events = true;
i::v8_flags.allow_natives_syntax = true;
LogTest::SetUpTestSuite();
}
};
......@@ -543,7 +543,7 @@ TEST_F(LogAllTest, LogAll) {
CHECK(logger.ContainsLine({"code-creation,Script", ":1:1"}));
CHECK(logger.ContainsLine({"code-creation,JS,", "testAddFn"}));
if (i::FLAG_turbofan && !i::FLAG_always_turbofan) {
if (i::v8_flags.turbofan && !i::v8_flags.always_turbofan) {
CHECK(logger.ContainsLine({"code-deopt,", "not a Smi"}));
CHECK(logger.ContainsLine({"timer-event-start", "V8.DeoptimizeCode"}));
CHECK(logger.ContainsLine({"timer-event-end", "V8.DeoptimizeCode"}));
......@@ -554,7 +554,7 @@ TEST_F(LogAllTest, LogAll) {
class LogInterpretedFramesNativeStackTest : public LogTest {
public:
static void SetUpTestSuite() {
i::FLAG_interpreted_frames_native_stack = true;
i::v8_flags.interpreted_frames_native_stack = true;
LogTest::SetUpTestSuite();
}
};
......@@ -583,13 +583,13 @@ class LogInterpretedFramesNativeStackWithSerializationTest
: array_buffer_allocator_(
v8::ArrayBuffer::Allocator::NewDefaultAllocator()) {}
static void SetUpTestSuite() {
i::FLAG_log = true;
i::FLAG_prof = true;
i::FLAG_log_code = true;
i::FLAG_logfile = i::LogFile::kLogToTemporaryFile;
i::FLAG_logfile_per_isolate = false;
i::FLAG_interpreted_frames_native_stack = true;
i::FLAG_always_turbofan = false;
i::v8_flags.log = true;
i::v8_flags.prof = true;
i::v8_flags.log_code = true;
i::v8_flags.logfile = i::LogFile::kLogToTemporaryFile;
i::v8_flags.logfile_per_isolate = false;
i::v8_flags.interpreted_frames_native_stack = true;
i::v8_flags.always_turbofan = false;
TestWithPlatform::SetUpTestSuite();
}
......@@ -673,8 +673,8 @@ TEST_F(LogInterpretedFramesNativeStackWithSerializationTest,
class LogExternalLogEventListenerTest : public TestWithIsolate {
public:
static void SetUpTestSuite() {
i::FLAG_log = false;
i::FLAG_prof = false;
i::v8_flags.log = false;
i::v8_flags.prof = false;
TestWithIsolate::SetUpTestSuite();
}
};
......@@ -728,8 +728,8 @@ class LogExternalLogEventListenerInnerFunctionTest : public TestWithPlatform {
: array_buffer_allocator_(
v8::ArrayBuffer::Allocator::NewDefaultAllocator()) {}
static void SetUpTestSuite() {
i::FLAG_log = false;
i::FLAG_prof = false;
i::v8_flags.log = false;
i::v8_flags.prof = false;
TestWithPlatform::SetUpTestSuite();
}
......@@ -771,11 +771,11 @@ TEST_F(LogExternalLogEventListenerInnerFunctionTest,
v8::ScriptCompiler::CompileUnboundScript(isolate1, &source)
.ToLocalChecked();
CHECK_EQ(code_event_handler.CountLines("Function", "f1"),
1 + (i::FLAG_stress_background_compile ? 1 : 0) +
(i::FLAG_always_sparkplug ? 1 : 0));
1 + (i::v8_flags.stress_background_compile ? 1 : 0) +
(i::v8_flags.always_sparkplug ? 1 : 0));
CHECK_EQ(code_event_handler.CountLines("Function", "f2"),
1 + (i::FLAG_stress_background_compile ? 1 : 0) +
(i::FLAG_always_sparkplug ? 1 : 0));
1 + (i::v8_flags.stress_background_compile ? 1 : 0) +
(i::v8_flags.always_sparkplug ? 1 : 0));
cache = v8::ScriptCompiler::CreateCodeCache(script);
}
isolate1->Dispose();
......@@ -811,9 +811,9 @@ TEST_F(LogExternalLogEventListenerInnerFunctionTest,
class LogExternalInterpretedFramesNativeStackTest : public TestWithIsolate {
public:
static void SetUpTestSuite() {
i::FLAG_log = false;
i::FLAG_prof = false;
i::FLAG_interpreted_frames_native_stack = true;
i::v8_flags.log = false;
i::v8_flags.prof = false;
i::v8_flags.interpreted_frames_native_stack = true;
TestWithIsolate::SetUpTestSuite();
}
};
......@@ -863,7 +863,7 @@ TEST_F(LogExternalInterpretedFramesNativeStackTest,
class LogMapsTest : public LogTest {
public:
static void SetUpTestSuite() {
i::FLAG_log_maps = true;
i::v8_flags.log_maps = true;
LogTest::SetUpTestSuite();
}
};
......@@ -949,8 +949,9 @@ void ValidateMapDetailsLogging(v8::Isolate* isolate,
TEST_F(LogMapsTest, LogMapsDetailsStartup) {
// Reusing map addresses might cause these tests to fail.
if (i::FLAG_gc_global || i::FLAG_stress_compaction ||
i::FLAG_stress_incremental_marking || i::FLAG_enable_third_party_heap) {
if (i::v8_flags.gc_global || i::v8_flags.stress_compaction ||
i::v8_flags.stress_incremental_marking ||
i::v8_flags.enable_third_party_heap) {
return;
}
// Test that all Map details from Maps in the snapshot are logged properly.
......@@ -964,16 +965,17 @@ TEST_F(LogMapsTest, LogMapsDetailsStartup) {
class LogMapsCodeTest : public LogTest {
public:
static void SetUpTestSuite() {
i::FLAG_retain_maps_for_n_gc = 0xFFFFFFF;
i::FLAG_log_maps = true;
i::v8_flags.retain_maps_for_n_gc = 0xFFFFFFF;
i::v8_flags.log_maps = true;
LogTest::SetUpTestSuite();
}
};
TEST_F(LogMapsCodeTest, LogMapsDetailsCode) {
// Reusing map addresses might cause these tests to fail.
if (i::FLAG_gc_global || i::FLAG_stress_compaction ||
i::FLAG_stress_incremental_marking || i::FLAG_enable_third_party_heap) {
if (i::v8_flags.gc_global || i::v8_flags.stress_compaction ||
i::v8_flags.stress_incremental_marking ||
i::v8_flags.enable_third_party_heap) {
return;
}
......@@ -1061,8 +1063,9 @@ TEST_F(LogMapsCodeTest, LogMapsDetailsCode) {
TEST_F(LogMapsTest, LogMapsDetailsContexts) {
// Reusing map addresses might cause these tests to fail.
if (i::FLAG_gc_global || i::FLAG_stress_compaction ||
i::FLAG_stress_incremental_marking || i::FLAG_enable_third_party_heap) {
if (i::v8_flags.gc_global || i::v8_flags.stress_compaction ||
i::v8_flags.stress_incremental_marking ||
i::v8_flags.enable_third_party_heap) {
return;
}
// Test that all Map details from Maps in the snapshot are logged properly.
......@@ -1132,7 +1135,7 @@ class LogFunctionEventsTest : public LogTest {
TEST_F(LogFunctionEventsTest, LogFunctionEvents) {
// --always-turbofan will break the fine-grained log order.
if (i::FLAG_always_turbofan) return;
if (i::v8_flags.always_turbofan) return;
{
ScopedLoggerInitializer logger(isolate());
......
......@@ -460,8 +460,8 @@ static void CustomCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
} // namespace
TEST_F(RuntimeCallStatsTest, CallbackFunction) {
FLAG_allow_natives_syntax = true;
FLAG_incremental_marking = false;
v8_flags.allow_natives_syntax = true;
v8_flags.incremental_marking = false;
RuntimeCallCounter* callback_counter =
stats()->GetCounter(RuntimeCallCounterId::kFunctionCallback);
......@@ -539,8 +539,8 @@ TEST_F(RuntimeCallStatsTest, CallbackFunction) {
}
TEST_F(RuntimeCallStatsTest, ApiGetter) {
FLAG_allow_natives_syntax = true;
FLAG_incremental_marking = false;
v8_flags.allow_natives_syntax = true;
v8_flags.incremental_marking = false;
RuntimeCallCounter* callback_counter =
stats()->GetCounter(RuntimeCallCounterId::kFunctionCallback);
......@@ -627,12 +627,12 @@ TEST_F(RuntimeCallStatsTest, ApiGetter) {
}
TEST_F(RuntimeCallStatsTest, GarbageCollection) {
if (FLAG_stress_incremental_marking) return;
FLAG_expose_gc = true;
if (v8_flags.stress_incremental_marking) return;
v8_flags.expose_gc = true;
// Disable concurrent GC threads because otherwise they may continue
// running after this test completes and race with is_runtime_stats_enabled()
// updates.
FLAG_single_threaded_gc = true;
v8_flags.single_threaded_gc = true;
FlagList::EnforceFlagImplications();
v8::Isolate* isolate = v8_isolate();
......
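
When a test assigns flags directly, as the GarbageCollection test above does with v8_flags.single_threaded_gc, any flags that are merely implied by other flags are not updated automatically; that is what the explicit FlagList::EnforceFlagImplications() call is for. A hedged sketch of the idea with made-up flags and implication rules, not V8's actual flag machinery:

  #include <cassert>

  // Made-up flags and implications, for illustration only.
  struct FlagValues {
    bool single_threaded_gc = false;
    bool concurrent_marking = true;
    bool parallel_scavenge = true;
  } v8_flags;

  // Re-derive dependent flags after direct assignments, mirroring the role
  // played by FlagList::EnforceFlagImplications() in the test above.
  void EnforceFlagImplications() {
    if (v8_flags.single_threaded_gc) {
      v8_flags.concurrent_marking = false;
      v8_flags.parallel_scavenge = false;
    }
  }

  int main() {
    v8_flags.single_threaded_gc = true;  // direct assignment, as in the test
    EnforceFlagImplications();           // propagate to dependent flags
    assert(!v8_flags.concurrent_marking);
    assert(!v8_flags.parallel_scavenge);
  }
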