Commit 588688f3 authored by Clemens Backes, committed by V8 LUCI CQ

[profiler] Use v8_flags for accessing flag values

Avoid the deprecated FLAG_* syntax; access flag values via the
{v8_flags} struct instead.

R=cbruni@chromium.org

Bug: v8:12887
Change-Id: I7e828480e9cc919609dac69df89315c6fdc82dff
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3899296
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Commit-Queue: Clemens Backes <clemensb@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83228}
parent 7db6d76e
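
For orientation, here is the shape of the change this CL applies throughout the profiler, as a standalone sketch. The FlagValues struct below is a hypothetical stand-in for the real {v8_flags} object declared in V8's flag definitions, reduced to two flags this CL touches; it is not V8's actual definition.

#include <cstdio>

// Hypothetical stand-in for V8's {v8_flags} struct: one global object whose
// members replace the old FLAG_* globals one-for-one.
struct FlagValues {
  bool prof_browser_mode = false;
  bool heap_profiler_trace_objects = false;
};
FlagValues v8_flags;

int main() {
  // Deprecated spelling before this CL:  if (!FLAG_prof_browser_mode) ...
  // Spelling after this CL:
  if (!v8_flags.prof_browser_mode) {
    std::puts("logging code objects eagerly");
  }
  return 0;
}

Since every flag lives in one struct, call sites change mechanically from FLAG_foo to v8_flags.foo; behavior is unchanged.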
@@ -90,7 +90,7 @@ ProfilingScope::ProfilingScope(Isolate* isolate, ProfilerListener* listener)
   // callbacks on the heap.
   DCHECK(isolate_->heap()->HasBeenSetUp());
-  if (!FLAG_prof_browser_mode) {
+  if (!v8_flags.prof_browser_mode) {
     logger->LogCodeObjects();
   }
   logger->LogCompiledFunctions();
@@ -511,7 +511,7 @@ CpuProfiler::CpuProfiler(Isolate* isolate, CpuProfilingNamingMode naming_mode,
       naming_mode_(naming_mode),
       logging_mode_(logging_mode),
       base_sampling_interval_(base::TimeDelta::FromMicroseconds(
-          FLAG_cpu_profiler_sampling_interval)),
+          v8_flags.cpu_profiler_sampling_interval)),
       code_observer_(test_code_observer),
       profiles_(test_profiles),
       symbolizer_(test_symbolizer),
@@ -547,7 +547,7 @@ bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
     // Size of an object can change during its life, so to keep information
     // about the object in entries_ consistent, we have to adjust size when the
     // object is migrated.
-    if (FLAG_heap_profiler_trace_objects) {
+    if (v8_flags.heap_profiler_trace_objects) {
       PrintF("Move object from %p to %p old size %6d new size %6d\n",
              reinterpret_cast<void*>(from), reinterpret_cast<void*>(to),
              entries_.at(from_entry_info_index).size, object_size);
@@ -586,7 +586,7 @@ SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
         static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
     EntryInfo& entry_info = entries_.at(entry_index);
     entry_info.accessed = accessed;
-    if (FLAG_heap_profiler_trace_objects) {
+    if (v8_flags.heap_profiler_trace_objects) {
       PrintF("Update object size : %p with old size %d and new size %d\n",
              reinterpret_cast<void*>(addr), entry_info.size, size);
     }
@@ -622,7 +622,7 @@ void HeapObjectsMap::AddMergedNativeEntry(NativeObject addr,

 void HeapObjectsMap::StopHeapObjectsTracking() { time_intervals_.clear(); }

 void HeapObjectsMap::UpdateHeapObjectsMap() {
-  if (FLAG_heap_profiler_trace_objects) {
+  if (v8_flags.heap_profiler_trace_objects) {
     PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
            entries_map_.occupancy());
   }
@@ -634,14 +634,14 @@ void HeapObjectsMap::UpdateHeapObjectsMap() {
        obj = iterator.Next()) {
     int object_size = obj.Size(cage_base);
     FindOrAddEntry(obj.address(), object_size);
-    if (FLAG_heap_profiler_trace_objects) {
+    if (v8_flags.heap_profiler_trace_objects) {
       PrintF("Update object      : %p %6d. Next address is %p\n",
              reinterpret_cast<void*>(obj.address()), object_size,
              reinterpret_cast<void*>(obj.address() + object_size));
     }
   }
   RemoveDeadEntries();
-  if (FLAG_heap_profiler_trace_objects) {
+  if (v8_flags.heap_profiler_trace_objects) {
     PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
            entries_map_.occupancy());
   }
@@ -877,7 +877,8 @@ HeapEntry* V8HeapExplorer::AddEntry(HeapObject object) {

 HeapEntry* V8HeapExplorer::AddEntry(HeapObject object, HeapEntry::Type type,
                                     const char* name) {
-  if (FLAG_heap_profiler_show_hidden_objects && type == HeapEntry::kHidden) {
+  if (v8_flags.heap_profiler_show_hidden_objects &&
+      type == HeapEntry::kHidden) {
     type = HeapEntry::kNative;
   }
   PtrComprCageBase cage_base(isolate());
@@ -2094,7 +2095,7 @@ bool V8HeapExplorer::IterateAndExtractReferences(
     // objects, and fails DCHECKs if we attempt to. Read-only objects can
     // never retain read-write objects, so there is no risk in skipping
     // verification for them.
-    if (FLAG_heap_snapshot_verify &&
+    if (v8_flags.heap_snapshot_verify &&
         !BasicMemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
       verifier = std::make_unique<HeapEntryVerifier>(generator, obj);
     }
@@ -2643,7 +2644,7 @@ bool NativeObjectsExplorer::IterateAndExtractReferences(
     HeapSnapshotGenerator* generator) {
   generator_ = generator;
-  if (FLAG_heap_profiler_use_embedder_graph &&
+  if (v8_flags.heap_profiler_use_embedder_graph &&
       snapshot_->profiler()->HasBuildEmbedderGraphCallback()) {
     v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_));
     DisallowGarbageCollection no_gc;
@@ -2726,7 +2727,7 @@ bool HeapSnapshotGenerator::GenerateSnapshot() {

 #ifdef VERIFY_HEAP
   Heap* debug_heap = heap_;
-  if (FLAG_verify_heap) {
+  if (v8_flags.verify_heap) {
     HeapVerifier::VerifyHeap(debug_heap);
   }
 #endif
@@ -2734,7 +2735,7 @@ bool HeapSnapshotGenerator::GenerateSnapshot() {
   InitProgressCounter();

 #ifdef VERIFY_HEAP
-  if (FLAG_verify_heap) {
+  if (v8_flags.verify_heap) {
     HeapVerifier::VerifyHeap(debug_heap);
   }
 #endif
@@ -591,7 +591,7 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
     HeapEntry* result =
         entries_map_.emplace(ptr, allocator->AllocateEntry(ptr)).first->second;
 #ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
-    if (FLAG_heap_snapshot_verify) {
+    if (v8_flags.heap_snapshot_verify) {
       reverse_entries_map_.emplace(result, ptr);
     }
 #endif
@@ -602,7 +602,7 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
   HeapThing FindHeapThingForHeapEntry(HeapEntry* entry) {
     // The reverse lookup map is only populated if the verification flag is
     // enabled.
-    DCHECK(FLAG_heap_snapshot_verify);
+    DCHECK(v8_flags.heap_snapshot_verify);
     auto it = reverse_entries_map_.find(entry);
     return it == reverse_entries_map_.end() ? nullptr : it->second;
@@ -25,7 +25,7 @@ namespace internal {
 // Let u be a uniformly distributed random number between 0 and 1, then
 // next_sample = (- ln u) / λ
 intptr_t SamplingHeapProfiler::Observer::GetNextSampleInterval(uint64_t rate) {
-  if (FLAG_sampling_heap_profiler_suppress_randomness)
+  if (v8_flags.sampling_heap_profiler_suppress_randomness)
     return static_cast<intptr_t>(rate);
   double u = random_->NextDouble();
   double next = (-base::ieee754::log(u)) * rate;
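
The comment in the hunk above is the standard inverse-transform construction: if u is uniform on (0, 1), then -ln(u) * rate is exponentially distributed with mean `rate`, which makes the sample points a Poisson process over the allocation stream. Below is a standalone sketch of the same computation, using <random> in place of V8's RNG and base::ieee754::log; the clamp to a minimum interval of one byte is an assumption of this sketch, not taken from the hunk above.

#include <cmath>
#include <cstdint>
#include <cstdio>
#include <limits>
#include <random>

// Next sampling interval in bytes for a Poisson process whose mean interval
// is `rate` bytes: interval = -ln(u) * rate with u ~ Uniform(0, 1).
intptr_t NextSampleInterval(uint64_t rate, std::mt19937_64& rng,
                            bool suppress_randomness) {
  if (suppress_randomness) return static_cast<intptr_t>(rate);  // fixed stride
  std::uniform_real_distribution<double> uniform(
      std::numeric_limits<double>::min(), 1.0);  // open at 0, avoids log(0)
  double u = uniform(rng);
  double next = -std::log(u) * static_cast<double>(rate);
  return next < 1.0 ? 1 : static_cast<intptr_t>(next);  // assumed 1-byte floor
}

int main() {
  std::mt19937_64 rng(42);
  double sum = 0;
  constexpr int kDraws = 100000;
  for (int i = 0; i < kDraws; i++) sum += NextSampleInterval(512, rng, false);
  std::printf("mean interval: %.1f bytes (expected ~512)\n", sum / kDraws);
  return 0;
}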
@@ -81,8 +81,8 @@ const char* StringsStorage::GetSymbol(Symbol sym) {
     return "<symbol>";
   }
   String description = String::cast(sym.description());
-  int length =
-      std::min(FLAG_heap_snapshot_string_limit.value(), description.length());
+  int length = std::min(v8_flags.heap_snapshot_string_limit.value(),
+                        description.length());
   auto data = description.ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0,
                                     length, &length);
   if (sym.is_private_name()) {
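
Note that the explicit `.value()` read survives this change. That is presumably because std::min deduces a single type for both arguments: the flag is a wrapper type that converts implicitly to int, but template deduction sees the wrapper itself, so it must be unwrapped before the call. A minimal sketch of that failure mode, with a hypothetical IntFlag wrapper standing in for V8's FlagValue<int> (not its actual definition):

#include <algorithm>
#include <cstdio>

// Hypothetical wrapper in the spirit of a FlagValue<int>: implicitly
// convertible to int, but a distinct type as far as templates can see.
struct IntFlag {
  int v;
  operator int() const { return v; }  // implicit read at ordinary call sites
  int value() const { return v; }     // explicit read for template contexts
};

int main() {
  IntFlag limit{1024};
  int incoming_length = 4096;
  // std::min(limit, incoming_length);  // would not compile: deduction sees
  //                                    // IntFlag vs int, no common T
  int length = std::min(limit.value(), incoming_length);  // unwrap explicitly
  std::printf("clamped: %d\n", length);
  return 0;
}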
@@ -98,7 +98,7 @@ const char* StringsStorage::GetName(Name name) {
   if (name.IsString()) {
     String str = String::cast(name);
     int length =
-        std::min(FLAG_heap_snapshot_string_limit.value(), str.length());
+        std::min(v8_flags.heap_snapshot_string_limit.value(), str.length());
     int actual_length = 0;
     std::unique_ptr<char[]> data = str.ToCString(
         DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length, &actual_length);
@@ -117,7 +117,7 @@ const char* StringsStorage::GetConsName(const char* prefix, Name name) {
   if (name.IsString()) {
     String str = String::cast(name);
     int length =
-        std::min(FLAG_heap_snapshot_string_limit.value(), str.length());
+        std::min(v8_flags.heap_snapshot_string_limit.value(), str.length());
     int actual_length = 0;
     std::unique_ptr<char[]> data = str.ToCString(
         DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length, &actual_length);
@@ -161,7 +161,7 @@ Symbolizer::SymbolizedSample Symbolizer::SymbolizeTickSample(
     }
   }
-  if (FLAG_prof_browser_mode) {
+  if (v8_flags.prof_browser_mode) {
     bool no_symbolized_entries = true;
     for (auto e : stack_trace) {
       if (e.code_entry != nullptr) {
@@ -53,10 +53,10 @@ using i::AllocationTraceNode;
 using i::AllocationTraceTree;
 using i::AllocationTracker;
 using i::SourceLocation;
+using i::heap::GrowNewSpaceToMaximumCapacity;
 using v8::base::ArrayVector;
 using v8::base::Optional;
 using v8::base::Vector;
-using v8::internal::heap::GrowNewSpaceToMaximumCapacity;

 namespace {
@@ -1297,7 +1297,7 @@ static TestStatsStream GetHeapStatsUpdate(

 TEST(HeapSnapshotObjectsStats) {
   // Concurrent allocation might break results
-  v8::internal::v8_flags.stress_concurrent_allocation = false;
+  i::v8_flags.stress_concurrent_allocation = false;
   LocalContext env;
   v8::HandleScope scope(env->GetIsolate());
@@ -2639,7 +2639,7 @@ TEST(ManyLocalsInSharedContext) {
       env->GetIsolate(), ok_object, v8::HeapGraphEdge::kInternal, "context");
   CHECK(context_object);
   // Check the objects are not duplicated in the context.
-  CHECK_EQ(v8::internal::Context::MIN_CONTEXT_EXTENDED_SLOTS + num_objects - 1,
+  CHECK_EQ(i::Context::MIN_CONTEXT_EXTENDED_SLOTS + num_objects - 1,
            context_object->GetChildrenCount());
   // Check all the objects have got their names.
   // ... well check just every 15th because otherwise it's too slow in debug.
@@ -2695,7 +2695,7 @@ TEST(AllocationSitesAreVisible) {
                                  v8::HeapGraphEdge::kInternal, "elements");
   CHECK(elements);
   CHECK_EQ(v8::HeapGraphNode::kCode, elements->GetType());
-  CHECK_EQ(v8::internal::FixedArray::SizeFor(3),
+  CHECK_EQ(i::FixedArray::SizeFor(3),
            static_cast<int>(elements->GetShallowSize()));

   v8::Local<v8::Value> array_val =
@@ -3704,10 +3704,10 @@ TEST(SamplingHeapProfiler) {

   // Turn off always_turbofan. Inlining can cause stack traces to be shorter
   // than what we expect in this test.
-  v8::internal::v8_flags.always_turbofan = false;
+  i::v8_flags.always_turbofan = false;

   // Suppress randomness to avoid flakiness in tests.
-  v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true;
+  i::v8_flags.sampling_heap_profiler_suppress_randomness = true;

   // Sample should be empty if requested before sampling has started.
   {
@@ -3788,16 +3788,16 @@ TEST(SamplingHeapProfilerRateAgnosticEstimates) {

   // Turn off always_turbofan. Inlining can cause stack traces to be shorter
   // than what we expect in this test.
-  v8::internal::v8_flags.always_turbofan = false;
+  i::v8_flags.always_turbofan = false;

   // Disable compilation cache to force compilation in both cases
-  v8::internal::v8_flags.compilation_cache = false;
+  i::v8_flags.compilation_cache = false;

   // Suppress randomness to avoid flakiness in tests.
-  v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true;
+  i::v8_flags.sampling_heap_profiler_suppress_randomness = true;

   // stress_incremental_marking adds randomness to the test.
-  v8::internal::v8_flags.stress_incremental_marking = false;
+  i::v8_flags.stress_incremental_marking = false;

   // warmup compilation
   CompileRun(simple_sampling_heap_profiler_script);
@@ -3869,7 +3869,7 @@ TEST(SamplingHeapProfilerApiAllocation) {
   v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();

   // Suppress randomness to avoid flakiness in tests.
-  v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true;
+  i::v8_flags.sampling_heap_profiler_suppress_randomness = true;

   heap_profiler->StartSamplingHeapProfiler(256);
@@ -3892,7 +3892,7 @@ TEST(SamplingHeapProfilerApiSamples) {
   v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();

   // Suppress randomness to avoid flakiness in tests.
-  v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true;
+  i::v8_flags.sampling_heap_profiler_suppress_randomness = true;

   heap_profiler->StartSamplingHeapProfiler(1024);
@@ -3937,7 +3937,7 @@ TEST(SamplingHeapProfilerLeftTrimming) {
   v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();

   // Suppress randomness to avoid flakiness in tests.
-  v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true;
+  i::v8_flags.sampling_heap_profiler_suppress_randomness = true;

   heap_profiler->StartSamplingHeapProfiler(64);
@@ -3950,7 +3950,7 @@ TEST(SamplingHeapProfilerLeftTrimming) {
       "  a.shift();\n"
      "}\n");

-  CcTest::CollectGarbage(v8::internal::NEW_SPACE);
+  CcTest::CollectGarbage(i::NEW_SPACE);
   // Should not crash.

   heap_profiler->StopSamplingHeapProfiler();
@@ -3975,7 +3975,7 @@ TEST(SamplingHeapProfilerPretenuredInlineAllocations) {
   v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();

   // Suppress randomness to avoid flakiness in tests.
-  v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true;
+  i::v8_flags.sampling_heap_profiler_suppress_randomness = true;

   GrowNewSpaceToMaximumCapacity(CcTest::heap());
@@ -4037,7 +4037,7 @@ TEST(SamplingHeapProfilerLargeInterval) {
   v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();

   // Suppress randomness to avoid flakiness in tests.
-  v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true;
+  i::v8_flags.sampling_heap_profiler_suppress_randomness = true;

   heap_profiler->StartSamplingHeapProfiler(512 * 1024);
@@ -4075,7 +4075,7 @@ TEST(SamplingHeapProfilerSampleDuringDeopt) {
   v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();

   // Suppress randomness to avoid flakiness in tests.
-  v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true;
+  i::v8_flags.sampling_heap_profiler_suppress_randomness = true;
   // Small sample interval to force each object to be sampled.
   heap_profiler->StartSamplingHeapProfiler(i::kTaggedSize);