Commit 7c79ab6d authored by Nikolaos Papaspyrou, committed by V8 LUCI CQ

[heap] Fix setting start of stack

The stack of an isolate's main thread is kept in the isolate's heap.
This CL sets the stack's start address when the isolate's heap is set
up; it can also be set explicitly from the embedder. The CL also fixes
threaded cctests, where an isolate is shared by many "main" threads.

Bug: v8:13257
Change-Id: Ie30bbbe4130882d94f23de946cbada748f32e22d
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3870923
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Nikolaos Papaspyrou <nikolaos@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82973}
parent c6a8832e
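
In short: whenever a different thread takes over as an isolate's main thread, it must re-register its stack with the heap so conservative stack scanning walks the right memory range. A minimal sketch of the pattern the threaded cctests adopt below (CcTest::isolate(), CcTest::i_isolate(), and CallTest() come from V8's cctest harness; the wrapper function name is illustrative):

    void RunTestOnCurrentThread() {
      // Take the V8 lock: this thread now acts as the isolate's main thread.
      v8::Locker locker(CcTest::isolate());
      // Register this thread's stack as the isolate's stack, so that
      // conservative stack scanning scans the thread that actually runs code.
      CcTest::i_isolate()->heap()->SetStackStart(
          v8::base::Stack::GetStackStart());
      CallTest();  // run the test body while holding the lock
    }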
@@ -520,7 +520,6 @@ void CppHeap::AttachIsolate(Isolate* isolate) {
         &CppGraphBuilder::Run, this);
   }
   SetMetricRecorder(std::make_unique<MetricRecorderAdapter>(*this));
-  isolate_->heap()->SetStackStart(base::Stack::GetStackStart());
   oom_handler().SetCustomHandler(&FatalOutOfMemoryHandlerImpl);
   ReduceGCCapabilititesFromFlags();
   no_gc_scope_--;
...
@@ -5481,7 +5481,7 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
   tracer_.reset(new GCTracer(this));
   array_buffer_sweeper_.reset(new ArrayBufferSweeper(this));
   gc_idle_time_handler_.reset(new GCIdleTimeHandler());
-  stack_ = std::make_unique<::heap::base::Stack>();
+  stack_ = std::make_unique<::heap::base::Stack>(base::Stack::GetStackStart());
   memory_measurement_.reset(new MemoryMeasurement(isolate()));
   if (!IsShared()) memory_reducer_.reset(new MemoryReducer(this));
   if (V8_UNLIKELY(TracingFlags::is_gc_stats_enabled())) {
...
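
The new constructor argument above seeds the stack-start bookkeeping in ::heap::base::Stack. A simplified sketch of that idea, with illustrative member names (the real class lives in V8's heap/base and also implements the pointer iteration itself):

    class Stack {
     public:
      explicit Stack(const void* stack_start = nullptr)
          : stack_start_(stack_start) {}
      // Re-registered when a different thread becomes the isolate's
      // main thread.
      void SetStackStart(const void* stack_start) {
        stack_start_ = stack_start;
      }
      // On a downward-growing stack, a conservative scan walks from the
      // current stack pointer up to stack_start_.
      const void* stack_start() const { return stack_start_; }

     private:
      const void* stack_start_;
    };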
@@ -13014,8 +13014,12 @@ void ApiTestFuzzer::Run() {
   // When it is our turn...
   gate_.Wait();
   {
-    // ... get the V8 lock and start running the test.
+    // ... get the V8 lock
     v8::Locker locker(CcTest::isolate());
+    // ... set the isolate stack to this thread
+    CcTest::i_isolate()->heap()->SetStackStart(
+        v8::base::Stack::GetStackStart());
+    // ... and start running the test.
     CallTest();
   }
   // This test finished.
@@ -13082,6 +13086,9 @@ void ApiTestFuzzer::ContextSwitch() {
     v8::Unlocker unlocker(CcTest::isolate());
     // Wait till someone starts us again.
     gate_.Wait();
+    // Set the isolate stack to this thread.
+    CcTest::i_isolate()->heap()->SetStackStart(
+        v8::base::Stack::GetStackStart());
     // And we're off.
   }
 }
@@ -247,8 +247,6 @@ TEST_F(TracedReferenceTest, WriteBarrierForOnStackReset) {
   if (!v8_flags.incremental_marking)
     GTEST_SKIP() << "Write barrier tests require incremental marking";

-  heap()->SetStackStart(base::Stack::GetStackStart());
-
   v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
   v8::Context::Scope context_scope(context);
   {
@@ -290,8 +288,6 @@ TEST_F(TracedReferenceTest, WriteBarrierForOnStackCopy) {
   if (!v8_flags.incremental_marking)
     GTEST_SKIP() << "Write barrier tests require incremental marking";

-  heap()->SetStackStart(base::Stack::GetStackStart());
-
   v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
   v8::Context::Scope context_scope(context);
   {
@@ -336,8 +332,6 @@ TEST_F(TracedReferenceTest, WriteBarrierForOnStackMove) {
   if (!v8_flags.incremental_marking)
     GTEST_SKIP() << "Write barrier tests require incremental marking";

-  heap()->SetStackStart(base::Stack::GetStackStart());
-
   v8::Local<v8::Context> context = v8::Context::New(v8_isolate());
   v8::Context::Scope context_scope(context);
   {
...
@@ -687,7 +687,17 @@ TEST_F(EmbedderTracingTest, TracedReferenceHandlesMarking) {
   heap::TemporaryEmbedderHeapTracerScope tracer_scope(v8_isolate(), &tracer);
   tracer.AddReferenceForTracing(live.get());
   const size_t initial_count = global_handles->handles_count();
+  {
+    // Conservative scanning may find stale pointers to on-stack handles.
+    // Disable scanning, assuming the slots are overwritten.
+    EmbedderStackStateScope scope =
+        EmbedderStackStateScope::ExplicitScopeForTesting(
+            reinterpret_cast<i::Isolate*>(v8_isolate())
+                ->heap()
+                ->local_embedder_heap_tracer(),
+            EmbedderHeapTracer::EmbedderStackState::kNoHeapPointers);
     FullGC();
+  }
   const size_t final_count = global_handles->handles_count();
   // Handles are not black allocated, so `dead` is immediately reclaimed.
   EXPECT_EQ(initial_count, final_count + 1);
...
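
The scope around FullGC() above is what keeps the handle-count assertion deterministic: with conservative stack scanning enabled, a stale on-stack copy of a handle slot could be misread as a live reference and keep `dead` alive, so the test pins the stack state to kNoHeapPointers for this collection.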