Commit 588688f3 authored by Clemens Backes, committed by V8 LUCI CQ

[profiler] Use v8_flags for accessing flag values

Avoid the deprecated FLAG_* syntax, access flag values via the
{v8_flags} struct instead.

R=cbruni@chromium.org

Bug: v8:12887
Change-Id: I7e828480e9cc919609dac69df89315c6fdc82dff
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3899296
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Commit-Queue: Clemens Backes <clemensb@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83228}
parent 7db6d76e
...@@ -90,7 +90,7 @@ ProfilingScope::ProfilingScope(Isolate* isolate, ProfilerListener* listener) ...@@ -90,7 +90,7 @@ ProfilingScope::ProfilingScope(Isolate* isolate, ProfilerListener* listener)
// callbacks on the heap. // callbacks on the heap.
DCHECK(isolate_->heap()->HasBeenSetUp()); DCHECK(isolate_->heap()->HasBeenSetUp());
if (!FLAG_prof_browser_mode) { if (!v8_flags.prof_browser_mode) {
logger->LogCodeObjects(); logger->LogCodeObjects();
} }
logger->LogCompiledFunctions(); logger->LogCompiledFunctions();
...@@ -511,7 +511,7 @@ CpuProfiler::CpuProfiler(Isolate* isolate, CpuProfilingNamingMode naming_mode, ...@@ -511,7 +511,7 @@ CpuProfiler::CpuProfiler(Isolate* isolate, CpuProfilingNamingMode naming_mode,
naming_mode_(naming_mode), naming_mode_(naming_mode),
logging_mode_(logging_mode), logging_mode_(logging_mode),
base_sampling_interval_(base::TimeDelta::FromMicroseconds( base_sampling_interval_(base::TimeDelta::FromMicroseconds(
FLAG_cpu_profiler_sampling_interval)), v8_flags.cpu_profiler_sampling_interval)),
code_observer_(test_code_observer), code_observer_(test_code_observer),
profiles_(test_profiles), profiles_(test_profiles),
symbolizer_(test_symbolizer), symbolizer_(test_symbolizer),
......
...@@ -547,7 +547,7 @@ bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) { ...@@ -547,7 +547,7 @@ bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
// Size of an object can change during its life, so to keep information // Size of an object can change during its life, so to keep information
// about the object in entries_ consistent, we have to adjust size when the // about the object in entries_ consistent, we have to adjust size when the
// object is migrated. // object is migrated.
if (FLAG_heap_profiler_trace_objects) { if (v8_flags.heap_profiler_trace_objects) {
PrintF("Move object from %p to %p old size %6d new size %6d\n", PrintF("Move object from %p to %p old size %6d new size %6d\n",
reinterpret_cast<void*>(from), reinterpret_cast<void*>(to), reinterpret_cast<void*>(from), reinterpret_cast<void*>(to),
entries_.at(from_entry_info_index).size, object_size); entries_.at(from_entry_info_index).size, object_size);
...@@ -586,7 +586,7 @@ SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr, ...@@ -586,7 +586,7 @@ SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
static_cast<int>(reinterpret_cast<intptr_t>(entry->value)); static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
EntryInfo& entry_info = entries_.at(entry_index); EntryInfo& entry_info = entries_.at(entry_index);
entry_info.accessed = accessed; entry_info.accessed = accessed;
if (FLAG_heap_profiler_trace_objects) { if (v8_flags.heap_profiler_trace_objects) {
PrintF("Update object size : %p with old size %d and new size %d\n", PrintF("Update object size : %p with old size %d and new size %d\n",
reinterpret_cast<void*>(addr), entry_info.size, size); reinterpret_cast<void*>(addr), entry_info.size, size);
} }
...@@ -622,7 +622,7 @@ void HeapObjectsMap::AddMergedNativeEntry(NativeObject addr, ...@@ -622,7 +622,7 @@ void HeapObjectsMap::AddMergedNativeEntry(NativeObject addr,
void HeapObjectsMap::StopHeapObjectsTracking() { time_intervals_.clear(); } void HeapObjectsMap::StopHeapObjectsTracking() { time_intervals_.clear(); }
void HeapObjectsMap::UpdateHeapObjectsMap() { void HeapObjectsMap::UpdateHeapObjectsMap() {
if (FLAG_heap_profiler_trace_objects) { if (v8_flags.heap_profiler_trace_objects) {
PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n", PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
entries_map_.occupancy()); entries_map_.occupancy());
} }
...@@ -634,14 +634,14 @@ void HeapObjectsMap::UpdateHeapObjectsMap() { ...@@ -634,14 +634,14 @@ void HeapObjectsMap::UpdateHeapObjectsMap() {
obj = iterator.Next()) { obj = iterator.Next()) {
int object_size = obj.Size(cage_base); int object_size = obj.Size(cage_base);
FindOrAddEntry(obj.address(), object_size); FindOrAddEntry(obj.address(), object_size);
if (FLAG_heap_profiler_trace_objects) { if (v8_flags.heap_profiler_trace_objects) {
PrintF("Update object : %p %6d. Next address is %p\n", PrintF("Update object : %p %6d. Next address is %p\n",
reinterpret_cast<void*>(obj.address()), object_size, reinterpret_cast<void*>(obj.address()), object_size,
reinterpret_cast<void*>(obj.address() + object_size)); reinterpret_cast<void*>(obj.address() + object_size));
} }
} }
RemoveDeadEntries(); RemoveDeadEntries();
if (FLAG_heap_profiler_trace_objects) { if (v8_flags.heap_profiler_trace_objects) {
PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n", PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
entries_map_.occupancy()); entries_map_.occupancy());
} }
...@@ -877,7 +877,8 @@ HeapEntry* V8HeapExplorer::AddEntry(HeapObject object) { ...@@ -877,7 +877,8 @@ HeapEntry* V8HeapExplorer::AddEntry(HeapObject object) {
HeapEntry* V8HeapExplorer::AddEntry(HeapObject object, HeapEntry::Type type, HeapEntry* V8HeapExplorer::AddEntry(HeapObject object, HeapEntry::Type type,
const char* name) { const char* name) {
if (FLAG_heap_profiler_show_hidden_objects && type == HeapEntry::kHidden) { if (v8_flags.heap_profiler_show_hidden_objects &&
type == HeapEntry::kHidden) {
type = HeapEntry::kNative; type = HeapEntry::kNative;
} }
PtrComprCageBase cage_base(isolate()); PtrComprCageBase cage_base(isolate());
...@@ -2094,7 +2095,7 @@ bool V8HeapExplorer::IterateAndExtractReferences( ...@@ -2094,7 +2095,7 @@ bool V8HeapExplorer::IterateAndExtractReferences(
// objects, and fails DCHECKs if we attempt to. Read-only objects can // objects, and fails DCHECKs if we attempt to. Read-only objects can
// never retain read-write objects, so there is no risk in skipping // never retain read-write objects, so there is no risk in skipping
// verification for them. // verification for them.
if (FLAG_heap_snapshot_verify && if (v8_flags.heap_snapshot_verify &&
!BasicMemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) { !BasicMemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
verifier = std::make_unique<HeapEntryVerifier>(generator, obj); verifier = std::make_unique<HeapEntryVerifier>(generator, obj);
} }
...@@ -2643,7 +2644,7 @@ bool NativeObjectsExplorer::IterateAndExtractReferences( ...@@ -2643,7 +2644,7 @@ bool NativeObjectsExplorer::IterateAndExtractReferences(
HeapSnapshotGenerator* generator) { HeapSnapshotGenerator* generator) {
generator_ = generator; generator_ = generator;
if (FLAG_heap_profiler_use_embedder_graph && if (v8_flags.heap_profiler_use_embedder_graph &&
snapshot_->profiler()->HasBuildEmbedderGraphCallback()) { snapshot_->profiler()->HasBuildEmbedderGraphCallback()) {
v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_)); v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_));
DisallowGarbageCollection no_gc; DisallowGarbageCollection no_gc;
...@@ -2726,7 +2727,7 @@ bool HeapSnapshotGenerator::GenerateSnapshot() { ...@@ -2726,7 +2727,7 @@ bool HeapSnapshotGenerator::GenerateSnapshot() {
#ifdef VERIFY_HEAP #ifdef VERIFY_HEAP
Heap* debug_heap = heap_; Heap* debug_heap = heap_;
if (FLAG_verify_heap) { if (v8_flags.verify_heap) {
HeapVerifier::VerifyHeap(debug_heap); HeapVerifier::VerifyHeap(debug_heap);
} }
#endif #endif
...@@ -2734,7 +2735,7 @@ bool HeapSnapshotGenerator::GenerateSnapshot() { ...@@ -2734,7 +2735,7 @@ bool HeapSnapshotGenerator::GenerateSnapshot() {
InitProgressCounter(); InitProgressCounter();
#ifdef VERIFY_HEAP #ifdef VERIFY_HEAP
if (FLAG_verify_heap) { if (v8_flags.verify_heap) {
HeapVerifier::VerifyHeap(debug_heap); HeapVerifier::VerifyHeap(debug_heap);
} }
#endif #endif
......
...@@ -591,7 +591,7 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface { ...@@ -591,7 +591,7 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
HeapEntry* result = HeapEntry* result =
entries_map_.emplace(ptr, allocator->AllocateEntry(ptr)).first->second; entries_map_.emplace(ptr, allocator->AllocateEntry(ptr)).first->second;
#ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY #ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
if (FLAG_heap_snapshot_verify) { if (v8_flags.heap_snapshot_verify) {
reverse_entries_map_.emplace(result, ptr); reverse_entries_map_.emplace(result, ptr);
} }
#endif #endif
...@@ -602,7 +602,7 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface { ...@@ -602,7 +602,7 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
HeapThing FindHeapThingForHeapEntry(HeapEntry* entry) { HeapThing FindHeapThingForHeapEntry(HeapEntry* entry) {
// The reverse lookup map is only populated if the verification flag is // The reverse lookup map is only populated if the verification flag is
// enabled. // enabled.
DCHECK(FLAG_heap_snapshot_verify); DCHECK(v8_flags.heap_snapshot_verify);
auto it = reverse_entries_map_.find(entry); auto it = reverse_entries_map_.find(entry);
return it == reverse_entries_map_.end() ? nullptr : it->second; return it == reverse_entries_map_.end() ? nullptr : it->second;
......
...@@ -25,7 +25,7 @@ namespace internal { ...@@ -25,7 +25,7 @@ namespace internal {
// Let u be a uniformly distributed random number between 0 and 1, then // Let u be a uniformly distributed random number between 0 and 1, then
// next_sample = (- ln u) / λ // next_sample = (- ln u) / λ
intptr_t SamplingHeapProfiler::Observer::GetNextSampleInterval(uint64_t rate) { intptr_t SamplingHeapProfiler::Observer::GetNextSampleInterval(uint64_t rate) {
if (FLAG_sampling_heap_profiler_suppress_randomness) if (v8_flags.sampling_heap_profiler_suppress_randomness)
return static_cast<intptr_t>(rate); return static_cast<intptr_t>(rate);
double u = random_->NextDouble(); double u = random_->NextDouble();
double next = (-base::ieee754::log(u)) * rate; double next = (-base::ieee754::log(u)) * rate;
......
...@@ -81,8 +81,8 @@ const char* StringsStorage::GetSymbol(Symbol sym) { ...@@ -81,8 +81,8 @@ const char* StringsStorage::GetSymbol(Symbol sym) {
return "<symbol>"; return "<symbol>";
} }
String description = String::cast(sym.description()); String description = String::cast(sym.description());
int length = int length = std::min(v8_flags.heap_snapshot_string_limit.value(),
std::min(FLAG_heap_snapshot_string_limit.value(), description.length()); description.length());
auto data = description.ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, auto data = description.ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0,
length, &length); length, &length);
if (sym.is_private_name()) { if (sym.is_private_name()) {
...@@ -98,7 +98,7 @@ const char* StringsStorage::GetName(Name name) { ...@@ -98,7 +98,7 @@ const char* StringsStorage::GetName(Name name) {
if (name.IsString()) { if (name.IsString()) {
String str = String::cast(name); String str = String::cast(name);
int length = int length =
std::min(FLAG_heap_snapshot_string_limit.value(), str.length()); std::min(v8_flags.heap_snapshot_string_limit.value(), str.length());
int actual_length = 0; int actual_length = 0;
std::unique_ptr<char[]> data = str.ToCString( std::unique_ptr<char[]> data = str.ToCString(
DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length, &actual_length); DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length, &actual_length);
...@@ -117,7 +117,7 @@ const char* StringsStorage::GetConsName(const char* prefix, Name name) { ...@@ -117,7 +117,7 @@ const char* StringsStorage::GetConsName(const char* prefix, Name name) {
if (name.IsString()) { if (name.IsString()) {
String str = String::cast(name); String str = String::cast(name);
int length = int length =
std::min(FLAG_heap_snapshot_string_limit.value(), str.length()); std::min(v8_flags.heap_snapshot_string_limit.value(), str.length());
int actual_length = 0; int actual_length = 0;
std::unique_ptr<char[]> data = str.ToCString( std::unique_ptr<char[]> data = str.ToCString(
DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length, &actual_length); DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length, &actual_length);
......
...@@ -161,7 +161,7 @@ Symbolizer::SymbolizedSample Symbolizer::SymbolizeTickSample( ...@@ -161,7 +161,7 @@ Symbolizer::SymbolizedSample Symbolizer::SymbolizeTickSample(
} }
} }
if (FLAG_prof_browser_mode) { if (v8_flags.prof_browser_mode) {
bool no_symbolized_entries = true; bool no_symbolized_entries = true;
for (auto e : stack_trace) { for (auto e : stack_trace) {
if (e.code_entry != nullptr) { if (e.code_entry != nullptr) {
......
...@@ -125,11 +125,11 @@ namespace { ...@@ -125,11 +125,11 @@ namespace {
class TestSetup { class TestSetup {
public: public:
TestSetup() : old_flag_prof_browser_mode_(i::FLAG_prof_browser_mode) { TestSetup() : old_flag_prof_browser_mode_(v8_flags.prof_browser_mode) {
i::FLAG_prof_browser_mode = false; v8_flags.prof_browser_mode = false;
} }
~TestSetup() { i::FLAG_prof_browser_mode = old_flag_prof_browser_mode_; } ~TestSetup() { v8_flags.prof_browser_mode = old_flag_prof_browser_mode_; }
private: private:
bool old_flag_prof_browser_mode_; bool old_flag_prof_browser_mode_;
...@@ -762,9 +762,9 @@ static const char* cpu_profiler_test_source = ...@@ -762,9 +762,9 @@ static const char* cpu_profiler_test_source =
TEST(CollectCpuProfile) { TEST(CollectCpuProfile) {
// Skip test if concurrent sparkplug is enabled. The test becomes flaky, // Skip test if concurrent sparkplug is enabled. The test becomes flaky,
// since it requires a precise trace. // since it requires a precise trace.
if (i::FLAG_concurrent_sparkplug) return; if (v8_flags.concurrent_sparkplug) return;
i::FLAG_allow_natives_syntax = true; v8_flags.allow_natives_syntax = true;
LocalContext env; LocalContext env;
v8::HandleScope scope(env->GetIsolate()); v8::HandleScope scope(env->GetIsolate());
...@@ -795,9 +795,9 @@ TEST(CollectCpuProfile) { ...@@ -795,9 +795,9 @@ TEST(CollectCpuProfile) {
TEST(CollectCpuProfileCallerLineNumbers) { TEST(CollectCpuProfileCallerLineNumbers) {
// Skip test if concurrent sparkplug is enabled. The test becomes flaky, // Skip test if concurrent sparkplug is enabled. The test becomes flaky,
// since it requires a precise trace. // since it requires a precise trace.
if (i::FLAG_concurrent_sparkplug) return; if (v8_flags.concurrent_sparkplug) return;
i::FLAG_allow_natives_syntax = true; v8_flags.allow_natives_syntax = true;
LocalContext env; LocalContext env;
v8::HandleScope scope(env->GetIsolate()); v8::HandleScope scope(env->GetIsolate());
...@@ -859,7 +859,7 @@ static const char* hot_deopt_no_frame_entry_test_source = ...@@ -859,7 +859,7 @@ static const char* hot_deopt_no_frame_entry_test_source =
// If 'foo' has no ranges the samples falling into the prologue will miss the // If 'foo' has no ranges the samples falling into the prologue will miss the
// 'start' function on the stack, so 'foo' will be attached to the (root). // 'start' function on the stack, so 'foo' will be attached to the (root).
TEST(HotDeoptNoFrameEntry) { TEST(HotDeoptNoFrameEntry) {
i::FLAG_allow_natives_syntax = true; v8_flags.allow_natives_syntax = true;
LocalContext env; LocalContext env;
v8::HandleScope scope(env->GetIsolate()); v8::HandleScope scope(env->GetIsolate());
...@@ -882,7 +882,7 @@ TEST(HotDeoptNoFrameEntry) { ...@@ -882,7 +882,7 @@ TEST(HotDeoptNoFrameEntry) {
} }
TEST(CollectCpuProfileSamples) { TEST(CollectCpuProfileSamples) {
i::FLAG_allow_natives_syntax = true; v8_flags.allow_natives_syntax = true;
LocalContext env; LocalContext env;
v8::HandleScope scope(env->GetIsolate()); v8::HandleScope scope(env->GetIsolate());
...@@ -936,7 +936,7 @@ static const char* cpu_profiler_test_source2 = ...@@ -936,7 +936,7 @@ static const char* cpu_profiler_test_source2 =
// 16 16 loop [-1] #5 // 16 16 loop [-1] #5
// 14 14 (program) [-1] #2 // 14 14 (program) [-1] #2
TEST(SampleWhenFrameIsNotSetup) { TEST(SampleWhenFrameIsNotSetup) {
i::FLAG_allow_natives_syntax = true; v8_flags.allow_natives_syntax = true;
LocalContext env; LocalContext env;
v8::HandleScope scope(env->GetIsolate()); v8::HandleScope scope(env->GetIsolate());
...@@ -1235,15 +1235,15 @@ TEST(BoundFunctionCall) { ...@@ -1235,15 +1235,15 @@ TEST(BoundFunctionCall) {
// This tests checks distribution of the samples through the source lines. // This tests checks distribution of the samples through the source lines.
static void TickLines(bool optimize) { static void TickLines(bool optimize) {
#ifndef V8_LITE_MODE #ifndef V8_LITE_MODE
FLAG_turbofan = optimize; v8_flags.turbofan = optimize;
#ifdef V8_ENABLE_MAGLEV #ifdef V8_ENABLE_MAGLEV
// TODO(v8:7700): Also test maglev here. // TODO(v8:7700): Also test maglev here.
FLAG_maglev = false; v8_flags.maglev = false;
#endif // V8_ENABLE_MAGLEV #endif // V8_ENABLE_MAGLEV
#endif // V8_LITE_MODE #endif // V8_LITE_MODE
CcTest::InitializeVM(); CcTest::InitializeVM();
LocalContext env; LocalContext env;
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
i::Isolate* isolate = CcTest::i_isolate(); i::Isolate* isolate = CcTest::i_isolate();
i::Factory* factory = isolate->factory(); i::Factory* factory = isolate->factory();
i::HandleScope scope(isolate); i::HandleScope scope(isolate);
...@@ -1399,9 +1399,9 @@ static const char* call_function_test_source = ...@@ -1399,9 +1399,9 @@ static const char* call_function_test_source =
TEST(FunctionCallSample) { TEST(FunctionCallSample) {
// Skip test if concurrent sparkplug is enabled. The test becomes flaky, // Skip test if concurrent sparkplug is enabled. The test becomes flaky,
// since it requires a precise trace. // since it requires a precise trace.
if (i::FLAG_concurrent_sparkplug) return; if (i::v8_flags.concurrent_sparkplug) return;
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
LocalContext env; LocalContext env;
v8::HandleScope scope(env->GetIsolate()); v8::HandleScope scope(env->GetIsolate());
...@@ -1460,9 +1460,9 @@ static const char* function_apply_test_source = ...@@ -1460,9 +1460,9 @@ static const char* function_apply_test_source =
TEST(FunctionApplySample) { TEST(FunctionApplySample) {
// Skip test if concurrent sparkplug is enabled. The test becomes flaky, // Skip test if concurrent sparkplug is enabled. The test becomes flaky,
// since it requires a precise trace. // since it requires a precise trace.
if (i::FLAG_concurrent_sparkplug) return; if (i::v8_flags.concurrent_sparkplug) return;
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
LocalContext env; LocalContext env;
v8::HandleScope scope(env->GetIsolate()); v8::HandleScope scope(env->GetIsolate());
...@@ -1569,7 +1569,7 @@ static void CallJsFunction(const v8::FunctionCallbackInfo<v8::Value>& info) { ...@@ -1569,7 +1569,7 @@ static void CallJsFunction(const v8::FunctionCallbackInfo<v8::Value>& info) {
// 55 1 bar #16 5 // 55 1 bar #16 5
// 54 54 foo #16 6 // 54 54 foo #16 6
TEST(JsNativeJsSample) { TEST(JsNativeJsSample) {
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID});
v8::Context::Scope context_scope(env); v8::Context::Scope context_scope(env);
...@@ -1622,7 +1622,7 @@ static const char* js_native_js_runtime_js_test_source = ...@@ -1622,7 +1622,7 @@ static const char* js_native_js_runtime_js_test_source =
// 51 51 foo #16 6 // 51 51 foo #16 6
// 2 2 (program) #0 2 // 2 2 (program) #0 2
TEST(JsNativeJsRuntimeJsSample) { TEST(JsNativeJsRuntimeJsSample) {
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID});
v8::Context::Scope context_scope(env); v8::Context::Scope context_scope(env);
...@@ -1679,7 +1679,7 @@ static const char* js_native1_js_native2_js_test_source = ...@@ -1679,7 +1679,7 @@ static const char* js_native1_js_native2_js_test_source =
// 54 54 foo #16 7 // 54 54 foo #16 7
// 2 2 (program) #0 2 // 2 2 (program) #0 2
TEST(JsNative1JsNative2JsSample) { TEST(JsNative1JsNative2JsSample) {
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID});
v8::Context::Scope context_scope(env); v8::Context::Scope context_scope(env);
...@@ -1779,7 +1779,7 @@ static const char* js_native_js_runtime_multiple_test_source = ...@@ -1779,7 +1779,7 @@ static const char* js_native_js_runtime_multiple_test_source =
// foo #16 6 // foo #16 6
// (program) #0 2 // (program) #0 2
TEST(JsNativeJsRuntimeJsSampleMultiple) { TEST(JsNativeJsRuntimeJsSampleMultiple) {
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID});
v8::Context::Scope context_scope(env); v8::Context::Scope context_scope(env);
...@@ -1847,7 +1847,7 @@ static const char* inlining_test_source = ...@@ -1847,7 +1847,7 @@ static const char* inlining_test_source =
// action #16 7 // action #16 7
// (program) #0 2 // (program) #0 2
TEST(Inlining) { TEST(Inlining) {
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID});
v8::Context::Scope context_scope(env); v8::Context::Scope context_scope(env);
...@@ -1945,9 +1945,9 @@ static const char* inlining_test_source2 = R"( ...@@ -1945,9 +1945,9 @@ static const char* inlining_test_source2 = R"(
TEST(Inlining2) { TEST(Inlining2) {
// Skip test if concurrent sparkplug is enabled. The test becomes flaky, // Skip test if concurrent sparkplug is enabled. The test becomes flaky,
// since it requires a precise trace. // since it requires a precise trace.
if (FLAG_concurrent_sparkplug) return; if (v8_flags.concurrent_sparkplug) return;
FLAG_allow_natives_syntax = true; v8_flags.allow_natives_syntax = true;
v8::Isolate* isolate = CcTest::isolate(); v8::Isolate* isolate = CcTest::isolate();
LocalContext env; LocalContext env;
v8::CpuProfiler::UseDetailedSourcePositionsForProfiling(isolate); v8::CpuProfiler::UseDetailedSourcePositionsForProfiling(isolate);
...@@ -2037,9 +2037,9 @@ static const char* cross_script_source_b = R"( ...@@ -2037,9 +2037,9 @@ static const char* cross_script_source_b = R"(
TEST(CrossScriptInliningCallerLineNumbers) { TEST(CrossScriptInliningCallerLineNumbers) {
// Skip test if concurrent sparkplug is enabled. The test becomes flaky, // Skip test if concurrent sparkplug is enabled. The test becomes flaky,
// since it requires a precise trace. // since it requires a precise trace.
if (i::FLAG_concurrent_sparkplug) return; if (i::v8_flags.concurrent_sparkplug) return;
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
v8::Isolate* isolate = CcTest::isolate(); v8::Isolate* isolate = CcTest::isolate();
LocalContext env; LocalContext env;
v8::CpuProfiler::UseDetailedSourcePositionsForProfiling(isolate); v8::CpuProfiler::UseDetailedSourcePositionsForProfiling(isolate);
...@@ -2132,9 +2132,9 @@ static const char* cross_script_source_f = R"( ...@@ -2132,9 +2132,9 @@ static const char* cross_script_source_f = R"(
TEST(CrossScriptInliningCallerLineNumbers2) { TEST(CrossScriptInliningCallerLineNumbers2) {
// Skip test if concurrent sparkplug is enabled. The test becomes flaky, // Skip test if concurrent sparkplug is enabled. The test becomes flaky,
// since it requires a precise trace. // since it requires a precise trace.
if (i::FLAG_concurrent_sparkplug) return; if (i::v8_flags.concurrent_sparkplug) return;
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
LocalContext env; LocalContext env;
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
ProfilerHelper helper(env.local()); ProfilerHelper helper(env.local());
...@@ -2251,7 +2251,7 @@ static void CheckFunctionDetails(v8::Isolate* isolate, ...@@ -2251,7 +2251,7 @@ static void CheckFunctionDetails(v8::Isolate* isolate,
} }
TEST(FunctionDetails) { TEST(FunctionDetails) {
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID});
v8::Context::Scope context_scope(env); v8::Context::Scope context_scope(env);
...@@ -2302,8 +2302,9 @@ TEST(FunctionDetails) { ...@@ -2302,8 +2302,9 @@ TEST(FunctionDetails) {
} }
TEST(FunctionDetailsInlining) { TEST(FunctionDetailsInlining) {
if (!CcTest::i_isolate()->use_optimizer() || i::FLAG_always_turbofan) return; if (!CcTest::i_isolate()->use_optimizer() || i::v8_flags.always_turbofan)
i::FLAG_allow_natives_syntax = true; return;
i::v8_flags.allow_natives_syntax = true;
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID});
v8::Context::Scope context_scope(env); v8::Context::Scope context_scope(env);
...@@ -2434,7 +2435,7 @@ static const char* pre_profiling_osr_script = R"( ...@@ -2434,7 +2435,7 @@ static const char* pre_profiling_osr_script = R"(
// 0 startProfiling:0 2 0 #4 // 0 startProfiling:0 2 0 #4
TEST(StartProfilingAfterOsr) { TEST(StartProfilingAfterOsr) {
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID});
v8::Context::Scope context_scope(env); v8::Context::Scope context_scope(env);
...@@ -2510,8 +2511,9 @@ const char* GetBranchDeoptReason(v8::Local<v8::Context> context, ...@@ -2510,8 +2511,9 @@ const char* GetBranchDeoptReason(v8::Local<v8::Context> context,
// deopt at top function // deopt at top function
TEST(CollectDeoptEvents) { TEST(CollectDeoptEvents) {
if (!CcTest::i_isolate()->use_optimizer() || i::FLAG_always_turbofan) return; if (!CcTest::i_isolate()->use_optimizer() || i::v8_flags.always_turbofan)
i::FLAG_allow_natives_syntax = true; return;
i::v8_flags.allow_natives_syntax = true;
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID});
v8::Context::Scope context_scope(env); v8::Context::Scope context_scope(env);
...@@ -2625,7 +2627,7 @@ TEST(CollectDeoptEvents) { ...@@ -2625,7 +2627,7 @@ TEST(CollectDeoptEvents) {
} }
TEST(SourceLocation) { TEST(SourceLocation) {
i::FLAG_always_turbofan = true; i::v8_flags.always_turbofan = true;
LocalContext env; LocalContext env;
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
...@@ -2648,8 +2650,9 @@ static const char* inlined_source = ...@@ -2648,8 +2650,9 @@ static const char* inlined_source =
// deopt at the first level inlined function // deopt at the first level inlined function
TEST(DeoptAtFirstLevelInlinedSource) { TEST(DeoptAtFirstLevelInlinedSource) {
if (!CcTest::i_isolate()->use_optimizer() || i::FLAG_always_turbofan) return; if (!CcTest::i_isolate()->use_optimizer() || i::v8_flags.always_turbofan)
i::FLAG_allow_natives_syntax = true; return;
i::v8_flags.allow_natives_syntax = true;
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID});
v8::Context::Scope context_scope(env); v8::Context::Scope context_scope(env);
...@@ -2720,8 +2723,9 @@ TEST(DeoptAtFirstLevelInlinedSource) { ...@@ -2720,8 +2723,9 @@ TEST(DeoptAtFirstLevelInlinedSource) {
// deopt at the second level inlined function // deopt at the second level inlined function
TEST(DeoptAtSecondLevelInlinedSource) { TEST(DeoptAtSecondLevelInlinedSource) {
if (!CcTest::i_isolate()->use_optimizer() || i::FLAG_always_turbofan) return; if (!CcTest::i_isolate()->use_optimizer() || i::v8_flags.always_turbofan)
i::FLAG_allow_natives_syntax = true; return;
i::v8_flags.allow_natives_syntax = true;
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID});
v8::Context::Scope context_scope(env); v8::Context::Scope context_scope(env);
...@@ -2798,8 +2802,9 @@ TEST(DeoptAtSecondLevelInlinedSource) { ...@@ -2798,8 +2802,9 @@ TEST(DeoptAtSecondLevelInlinedSource) {
// deopt in untracked function // deopt in untracked function
TEST(DeoptUntrackedFunction) { TEST(DeoptUntrackedFunction) {
if (!CcTest::i_isolate()->use_optimizer() || i::FLAG_always_turbofan) return; if (!CcTest::i_isolate()->use_optimizer() || i::v8_flags.always_turbofan)
i::FLAG_allow_natives_syntax = true; return;
i::v8_flags.allow_natives_syntax = true;
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID});
v8::Context::Scope context_scope(env); v8::Context::Scope context_scope(env);
...@@ -3017,15 +3022,15 @@ TEST(Issue763073) { ...@@ -3017,15 +3022,15 @@ TEST(Issue763073) {
class AllowNativesSyntax { class AllowNativesSyntax {
public: public:
AllowNativesSyntax() AllowNativesSyntax()
: allow_natives_syntax_(i::FLAG_allow_natives_syntax), : allow_natives_syntax_(i::v8_flags.allow_natives_syntax),
trace_deopt_(i::FLAG_trace_deopt) { trace_deopt_(i::v8_flags.trace_deopt) {
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
i::FLAG_trace_deopt = true; i::v8_flags.trace_deopt = true;
} }
~AllowNativesSyntax() { ~AllowNativesSyntax() {
i::FLAG_allow_natives_syntax = allow_natives_syntax_; i::v8_flags.allow_natives_syntax = allow_natives_syntax_;
i::FLAG_trace_deopt = trace_deopt_; i::v8_flags.trace_deopt = trace_deopt_;
} }
private: private:
...@@ -3079,7 +3084,7 @@ static void CallStaticCollectSample( ...@@ -3079,7 +3084,7 @@ static void CallStaticCollectSample(
} }
TEST(StaticCollectSampleAPI) { TEST(StaticCollectSampleAPI) {
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
LocalContext env; LocalContext env;
v8::HandleScope scope(env->GetIsolate()); v8::HandleScope scope(env->GetIsolate());
...@@ -3431,7 +3436,7 @@ class UnlockingThread : public v8::base::Thread { ...@@ -3431,7 +3436,7 @@ class UnlockingThread : public v8::base::Thread {
// Checking for crashes with multiple thread/single Isolate profiling. // Checking for crashes with multiple thread/single Isolate profiling.
TEST(MultipleThreadsSingleIsolate) { TEST(MultipleThreadsSingleIsolate) {
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
v8::Isolate* isolate = CcTest::isolate(); v8::Isolate* isolate = CcTest::isolate();
v8::Locker locker(isolate); v8::Locker locker(isolate);
v8::HandleScope scope(isolate); v8::HandleScope scope(isolate);
...@@ -3891,7 +3896,7 @@ TEST(Bug9151StaleCodeEntries) { ...@@ -3891,7 +3896,7 @@ TEST(Bug9151StaleCodeEntries) {
// Tests that functions from other contexts aren't recorded when filtering for // Tests that functions from other contexts aren't recorded when filtering for
// another context. // another context.
TEST(ContextIsolation) { TEST(ContextIsolation) {
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
LocalContext execution_env; LocalContext execution_env;
i::HandleScope scope(CcTest::i_isolate()); i::HandleScope scope(CcTest::i_isolate());
...@@ -3984,7 +3989,7 @@ void ValidateEmbedderState(v8::CpuProfile* profile, ...@@ -3984,7 +3989,7 @@ void ValidateEmbedderState(v8::CpuProfile* profile,
// Tests that embedder states from other contexts aren't recorded // Tests that embedder states from other contexts aren't recorded
TEST(EmbedderContextIsolation) { TEST(EmbedderContextIsolation) {
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
LocalContext execution_env; LocalContext execution_env;
i::HandleScope scope(CcTest::i_isolate()); i::HandleScope scope(CcTest::i_isolate());
...@@ -4047,7 +4052,7 @@ TEST(EmbedderContextIsolation) { ...@@ -4047,7 +4052,7 @@ TEST(EmbedderContextIsolation) {
// Tests that embedder states from same context are recorded // Tests that embedder states from same context are recorded
TEST(EmbedderStatePropagate) { TEST(EmbedderStatePropagate) {
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
LocalContext execution_env; LocalContext execution_env;
i::HandleScope scope(CcTest::i_isolate()); i::HandleScope scope(CcTest::i_isolate());
...@@ -4110,12 +4115,13 @@ TEST(EmbedderStatePropagate) { ...@@ -4110,12 +4115,13 @@ TEST(EmbedderStatePropagate) {
// even after native context move // even after native context move
TEST(EmbedderStatePropagateNativeContextMove) { TEST(EmbedderStatePropagateNativeContextMove) {
// Reusing context addresses will cause this test to fail. // Reusing context addresses will cause this test to fail.
if (i::FLAG_gc_global || i::FLAG_stress_compaction || if (i::v8_flags.gc_global || i::v8_flags.stress_compaction ||
i::FLAG_stress_incremental_marking || i::FLAG_enable_third_party_heap) { i::v8_flags.stress_incremental_marking ||
i::v8_flags.enable_third_party_heap) {
return; return;
} }
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
i::FLAG_manual_evacuation_candidates_selection = true; i::v8_flags.manual_evacuation_candidates_selection = true;
LocalContext execution_env; LocalContext execution_env;
i::HandleScope scope(CcTest::i_isolate()); i::HandleScope scope(CcTest::i_isolate());
...@@ -4184,9 +4190,9 @@ TEST(EmbedderStatePropagateNativeContextMove) { ...@@ -4184,9 +4190,9 @@ TEST(EmbedderStatePropagateNativeContextMove) {
// Tests that when a native context that's being filtered is moved, we continue // Tests that when a native context that's being filtered is moved, we continue
// to track its execution. // to track its execution.
TEST(ContextFilterMovedNativeContext) { TEST(ContextFilterMovedNativeContext) {
if (i::FLAG_enable_third_party_heap) return; if (i::v8_flags.enable_third_party_heap) return;
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
i::FLAG_manual_evacuation_candidates_selection = true; i::v8_flags.manual_evacuation_candidates_selection = true;
LocalContext env; LocalContext env;
i::HandleScope scope(CcTest::i_isolate()); i::HandleScope scope(CcTest::i_isolate());
...@@ -4267,8 +4273,8 @@ int GetSourcePositionEntryCount(i::Isolate* isolate, const char* source, ...@@ -4267,8 +4273,8 @@ int GetSourcePositionEntryCount(i::Isolate* isolate, const char* source,
} }
UNINITIALIZED_TEST(DetailedSourcePositionAPI) { UNINITIALIZED_TEST(DetailedSourcePositionAPI) {
i::FLAG_detailed_line_info = false; i::v8_flags.detailed_line_info = false;
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
v8::Isolate::CreateParams create_params; v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
v8::Isolate* isolate = v8::Isolate::New(create_params); v8::Isolate* isolate = v8::Isolate::New(create_params);
...@@ -4308,11 +4314,11 @@ UNINITIALIZED_TEST(DetailedSourcePositionAPI) { ...@@ -4308,11 +4314,11 @@ UNINITIALIZED_TEST(DetailedSourcePositionAPI) {
} }
UNINITIALIZED_TEST(DetailedSourcePositionAPI_Inlining) { UNINITIALIZED_TEST(DetailedSourcePositionAPI_Inlining) {
i::FLAG_detailed_line_info = false; i::v8_flags.detailed_line_info = false;
i::FLAG_turbo_inlining = true; i::v8_flags.turbo_inlining = true;
i::FLAG_stress_inline = true; i::v8_flags.stress_inline = true;
i::FLAG_always_turbofan = false; i::v8_flags.always_turbofan = false;
i::FLAG_allow_natives_syntax = true; i::v8_flags.allow_natives_syntax = true;
v8::Isolate::CreateParams create_params; v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
v8::Isolate* isolate = v8::Isolate::New(create_params); v8::Isolate* isolate = v8::Isolate::New(create_params);
...@@ -4457,7 +4463,7 @@ TEST(CanStartStopProfilerWithTitlesAndIds) { ...@@ -4457,7 +4463,7 @@ TEST(CanStartStopProfilerWithTitlesAndIds) {
TEST(FastApiCPUProfiler) { TEST(FastApiCPUProfiler) {
#if !defined(V8_LITE_MODE) && !defined(USE_SIMULATOR) #if !defined(V8_LITE_MODE) && !defined(USE_SIMULATOR)
// None of the following configurations include JSCallReducer. // None of the following configurations include JSCallReducer.
if (i::FLAG_jitless) return; if (i::v8_flags.jitless) return;
FLAG_SCOPE(turbofan); FLAG_SCOPE(turbofan);
FLAG_SCOPE(turbo_fast_api_calls); FLAG_SCOPE(turbo_fast_api_calls);
...@@ -4556,15 +4562,15 @@ TEST(FastApiCPUProfiler) { ...@@ -4556,15 +4562,15 @@ TEST(FastApiCPUProfiler) {
TEST(BytecodeFlushEventsEagerLogging) { TEST(BytecodeFlushEventsEagerLogging) {
#ifndef V8_LITE_MODE #ifndef V8_LITE_MODE
FLAG_turbofan = false; v8_flags.turbofan = false;
FLAG_always_turbofan = false; v8_flags.always_turbofan = false;
i::FLAG_optimize_for_size = false; v8_flags.optimize_for_size = false;
#endif // V8_LITE_MODE #endif // V8_LITE_MODE
#if ENABLE_SPARKPLUG #if ENABLE_SPARKPLUG
FLAG_always_sparkplug = false; v8_flags.always_sparkplug = false;
#endif // ENABLE_SPARKPLUG #endif // ENABLE_SPARKPLUG
i::FLAG_flush_bytecode = true; v8_flags.flush_bytecode = true;
i::FLAG_allow_natives_syntax = true; v8_flags.allow_natives_syntax = true;
TestSetup test_setup; TestSetup test_setup;
ManualGCScope manual_gc_scope; ManualGCScope manual_gc_scope;
......
...@@ -53,10 +53,10 @@ using i::AllocationTraceNode; ...@@ -53,10 +53,10 @@ using i::AllocationTraceNode;
using i::AllocationTraceTree; using i::AllocationTraceTree;
using i::AllocationTracker; using i::AllocationTracker;
using i::SourceLocation; using i::SourceLocation;
using i::heap::GrowNewSpaceToMaximumCapacity;
using v8::base::ArrayVector; using v8::base::ArrayVector;
using v8::base::Optional; using v8::base::Optional;
using v8::base::Vector; using v8::base::Vector;
using v8::internal::heap::GrowNewSpaceToMaximumCapacity;
namespace { namespace {
...@@ -1297,7 +1297,7 @@ static TestStatsStream GetHeapStatsUpdate( ...@@ -1297,7 +1297,7 @@ static TestStatsStream GetHeapStatsUpdate(
TEST(HeapSnapshotObjectsStats) { TEST(HeapSnapshotObjectsStats) {
// Concurrent allocation might break results // Concurrent allocation might break results
v8::internal::v8_flags.stress_concurrent_allocation = false; i::v8_flags.stress_concurrent_allocation = false;
LocalContext env; LocalContext env;
v8::HandleScope scope(env->GetIsolate()); v8::HandleScope scope(env->GetIsolate());
...@@ -2639,7 +2639,7 @@ TEST(ManyLocalsInSharedContext) { ...@@ -2639,7 +2639,7 @@ TEST(ManyLocalsInSharedContext) {
env->GetIsolate(), ok_object, v8::HeapGraphEdge::kInternal, "context"); env->GetIsolate(), ok_object, v8::HeapGraphEdge::kInternal, "context");
CHECK(context_object); CHECK(context_object);
// Check the objects are not duplicated in the context. // Check the objects are not duplicated in the context.
CHECK_EQ(v8::internal::Context::MIN_CONTEXT_EXTENDED_SLOTS + num_objects - 1, CHECK_EQ(i::Context::MIN_CONTEXT_EXTENDED_SLOTS + num_objects - 1,
context_object->GetChildrenCount()); context_object->GetChildrenCount());
// Check all the objects have got their names. // Check all the objects have got their names.
// ... well check just every 15th because otherwise it's too slow in debug. // ... well check just every 15th because otherwise it's too slow in debug.
...@@ -2695,7 +2695,7 @@ TEST(AllocationSitesAreVisible) { ...@@ -2695,7 +2695,7 @@ TEST(AllocationSitesAreVisible) {
v8::HeapGraphEdge::kInternal, "elements"); v8::HeapGraphEdge::kInternal, "elements");
CHECK(elements); CHECK(elements);
CHECK_EQ(v8::HeapGraphNode::kCode, elements->GetType()); CHECK_EQ(v8::HeapGraphNode::kCode, elements->GetType());
CHECK_EQ(v8::internal::FixedArray::SizeFor(3), CHECK_EQ(i::FixedArray::SizeFor(3),
static_cast<int>(elements->GetShallowSize())); static_cast<int>(elements->GetShallowSize()));
v8::Local<v8::Value> array_val = v8::Local<v8::Value> array_val =
...@@ -3704,10 +3704,10 @@ TEST(SamplingHeapProfiler) { ...@@ -3704,10 +3704,10 @@ TEST(SamplingHeapProfiler) {
// Turn off always_turbofan. Inlining can cause stack traces to be shorter // Turn off always_turbofan. Inlining can cause stack traces to be shorter
// than what we expect in this test. // than what we expect in this test.
v8::internal::v8_flags.always_turbofan = false; i::v8_flags.always_turbofan = false;
// Suppress randomness to avoid flakiness in tests. // Suppress randomness to avoid flakiness in tests.
v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true; i::v8_flags.sampling_heap_profiler_suppress_randomness = true;
// Sample should be empty if requested before sampling has started. // Sample should be empty if requested before sampling has started.
{ {
...@@ -3788,16 +3788,16 @@ TEST(SamplingHeapProfilerRateAgnosticEstimates) { ...@@ -3788,16 +3788,16 @@ TEST(SamplingHeapProfilerRateAgnosticEstimates) {
// Turn off always_turbofan. Inlining can cause stack traces to be shorter // Turn off always_turbofan. Inlining can cause stack traces to be shorter
// than what we expect in this test. // than what we expect in this test.
v8::internal::v8_flags.always_turbofan = false; i::v8_flags.always_turbofan = false;
// Disable compilation cache to force compilation in both cases // Disable compilation cache to force compilation in both cases
v8::internal::v8_flags.compilation_cache = false; i::v8_flags.compilation_cache = false;
// Suppress randomness to avoid flakiness in tests. // Suppress randomness to avoid flakiness in tests.
v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true; i::v8_flags.sampling_heap_profiler_suppress_randomness = true;
// stress_incremental_marking adds randomness to the test. // stress_incremental_marking adds randomness to the test.
v8::internal::v8_flags.stress_incremental_marking = false; i::v8_flags.stress_incremental_marking = false;
// warmup compilation // warmup compilation
CompileRun(simple_sampling_heap_profiler_script); CompileRun(simple_sampling_heap_profiler_script);
...@@ -3869,7 +3869,7 @@ TEST(SamplingHeapProfilerApiAllocation) { ...@@ -3869,7 +3869,7 @@ TEST(SamplingHeapProfilerApiAllocation) {
v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler(); v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();
// Suppress randomness to avoid flakiness in tests. // Suppress randomness to avoid flakiness in tests.
v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true; i::v8_flags.sampling_heap_profiler_suppress_randomness = true;
heap_profiler->StartSamplingHeapProfiler(256); heap_profiler->StartSamplingHeapProfiler(256);
...@@ -3892,7 +3892,7 @@ TEST(SamplingHeapProfilerApiSamples) { ...@@ -3892,7 +3892,7 @@ TEST(SamplingHeapProfilerApiSamples) {
v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler(); v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();
// Suppress randomness to avoid flakiness in tests. // Suppress randomness to avoid flakiness in tests.
v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true; i::v8_flags.sampling_heap_profiler_suppress_randomness = true;
heap_profiler->StartSamplingHeapProfiler(1024); heap_profiler->StartSamplingHeapProfiler(1024);
...@@ -3937,7 +3937,7 @@ TEST(SamplingHeapProfilerLeftTrimming) { ...@@ -3937,7 +3937,7 @@ TEST(SamplingHeapProfilerLeftTrimming) {
v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler(); v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();
// Suppress randomness to avoid flakiness in tests. // Suppress randomness to avoid flakiness in tests.
v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true; i::v8_flags.sampling_heap_profiler_suppress_randomness = true;
heap_profiler->StartSamplingHeapProfiler(64); heap_profiler->StartSamplingHeapProfiler(64);
...@@ -3950,7 +3950,7 @@ TEST(SamplingHeapProfilerLeftTrimming) { ...@@ -3950,7 +3950,7 @@ TEST(SamplingHeapProfilerLeftTrimming) {
" a.shift();\n" " a.shift();\n"
"}\n"); "}\n");
CcTest::CollectGarbage(v8::internal::NEW_SPACE); CcTest::CollectGarbage(i::NEW_SPACE);
// Should not crash. // Should not crash.
heap_profiler->StopSamplingHeapProfiler(); heap_profiler->StopSamplingHeapProfiler();
...@@ -3975,7 +3975,7 @@ TEST(SamplingHeapProfilerPretenuredInlineAllocations) { ...@@ -3975,7 +3975,7 @@ TEST(SamplingHeapProfilerPretenuredInlineAllocations) {
v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler(); v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();
// Suppress randomness to avoid flakiness in tests. // Suppress randomness to avoid flakiness in tests.
v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true; i::v8_flags.sampling_heap_profiler_suppress_randomness = true;
GrowNewSpaceToMaximumCapacity(CcTest::heap()); GrowNewSpaceToMaximumCapacity(CcTest::heap());
...@@ -4037,7 +4037,7 @@ TEST(SamplingHeapProfilerLargeInterval) { ...@@ -4037,7 +4037,7 @@ TEST(SamplingHeapProfilerLargeInterval) {
v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler(); v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();
// Suppress randomness to avoid flakiness in tests. // Suppress randomness to avoid flakiness in tests.
v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true; i::v8_flags.sampling_heap_profiler_suppress_randomness = true;
heap_profiler->StartSamplingHeapProfiler(512 * 1024); heap_profiler->StartSamplingHeapProfiler(512 * 1024);
...@@ -4075,7 +4075,7 @@ TEST(SamplingHeapProfilerSampleDuringDeopt) { ...@@ -4075,7 +4075,7 @@ TEST(SamplingHeapProfilerSampleDuringDeopt) {
v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler(); v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();
// Suppress randomness to avoid flakiness in tests. // Suppress randomness to avoid flakiness in tests.
v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true; i::v8_flags.sampling_heap_profiler_suppress_randomness = true;
// Small sample interval to force each object to be sampled. // Small sample interval to force each object to be sampled.
heap_profiler->StartSamplingHeapProfiler(i::kTaggedSize); heap_profiler->StartSamplingHeapProfiler(i::kTaggedSize);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment