Commit 929b83fb authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Attach to shared isolate after setting up main thread

Attach to the shared isolate only after the main thread has been set
up. Otherwise a shared GC initiated from another isolate might observe
that no threads are running and perform the safepoint operation in the
middle of isolate deserialization.
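
As a minimal standalone sketch of that ordering (not V8 code; SharedIsolate,
ClientIsolate and AttachClient below are illustrative stand-ins), the client
only registers with the shared isolate once its main-thread LocalHeap exists,
so a concurrent global safepoint never observes a client without threads:

#include <cassert>
#include <memory>
#include <mutex>
#include <vector>

struct LocalHeap {
  bool parked = false;
};

struct ClientIsolate {
  std::unique_ptr<LocalHeap> main_thread_local_heap;
};

class SharedIsolate {
 public:
  void AttachClient(ClientIsolate* client) {
    // Mirrors the new expectation: every attached client already has at
    // least one LocalHeap, so a global safepoint cannot run "in between".
    assert(client->main_thread_local_heap != nullptr);
    std::lock_guard<std::mutex> guard(clients_mutex_);
    clients_.push_back(client);
  }

  void GlobalSafepoint() {
    std::lock_guard<std::mutex> guard(clients_mutex_);
    for (ClientIsolate* client : clients_) {
      // With the new ordering this always sees a main-thread LocalHeap.
      assert(client->main_thread_local_heap != nullptr);
    }
  }

 private:
  std::mutex clients_mutex_;
  std::vector<ClientIsolate*> clients_;
};

int main() {
  SharedIsolate shared;
  ClientIsolate client;
  // New order: set up the main-thread LocalHeap first, then attach.
  client.main_thread_local_heap = std::make_unique<LocalHeap>();
  shared.AttachClient(&client);
  shared.GlobalSafepoint();
}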

We use DisallowSafepoints to check that the isolate doesn't join a
global safepoint before deserialization is complete. DisallowSafepoints
used to prevent only invocations of Safepoint(), but it now also
prevents Park() and Unpark() invocations, since either state change
could cause the thread to reach a safepoint and thereby allow a shared
GC to run.
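
A rough model of that strengthened scope, assuming the usual thread-local
assert-scope pattern; DisallowSafepoints and LocalHeapModel here are
illustrative sketches, not V8's actual implementation:

#include <cassert>

// While an instance is alive, the current thread must not reach a
// safepoint, which now also rules out Park()/Unpark() state changes.
class DisallowSafepoints {
 public:
  DisallowSafepoints() { ++depth_; }
  ~DisallowSafepoints() { --depth_; }
  static bool IsAllowed() { return depth_ == 0; }

 private:
  static thread_local int depth_;
};

thread_local int DisallowSafepoints::depth_ = 0;

class LocalHeapModel {
 public:
  void Park() {
    assert(DisallowSafepoints::IsAllowed());  // state change could reach a safepoint
    parked_ = true;
  }
  void Unpark() {
    assert(DisallowSafepoints::IsAllowed());  // same reasoning as Park()
    parked_ = false;
  }

 private:
  bool parked_ = false;
};

int main() {
  LocalHeapModel heap;
  heap.Park();
  heap.Unpark();
  {
    DisallowSafepoints no_safepoints;
    // Calling heap.Park() here would trip the assert, mirroring the new DCHECKs.
  }
}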

We now also DCHECK that every isolate has at least one local heap and
that shared collections aren't started before deserialization is
complete.
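
An illustrative reduction of the new checks around shared collections (names
simplified; only the deserialization_complete gate from heap.cc is modeled):

#include <cassert>

struct ClientHeap {
  bool deserialization_complete = false;
};

void CollectSharedGarbage(const ClientHeap& initiator) {
  // Mirrors the CHECK(deserialization_complete()) added to
  // Heap::CollectSharedGarbage.
  assert(initiator.deserialization_complete);
}

void PerformSharedGarbageCollection(const ClientHeap& client) {
  // Mirrors the DCHECK(client_heap->deserialization_complete()) added to
  // Heap::PerformSharedGarbageCollection.
  assert(client.deserialization_complete);
}

int main() {
  ClientHeap heap;
  heap.deserialization_complete = true;  // set once Isolate::Init finishes
  CollectSharedGarbage(heap);
  PerformSharedGarbageCollection(heap);
}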

Bug: v8:11708
Change-Id: Iba3fb59dd951d5ee4fc9934158062287302fc279
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3221157
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Shu-yu Guo <syg@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77424}
parent 42d3ff64
......@@ -8481,7 +8481,7 @@ void Isolate::Initialize(Isolate* isolate,
}
if (params.experimental_attach_to_shared_isolate != nullptr) {
i_isolate->AttachToSharedIsolate(reinterpret_cast<i::Isolate*>(
i_isolate->set_shared_isolate(reinterpret_cast<i::Isolate*>(
params.experimental_attach_to_shared_isolate));
}
......
......@@ -3125,6 +3125,7 @@ bool Isolate::LogObjectRelocation() {
void Isolate::Deinit() {
TRACE_ISOLATE(deinit);
DisallowHeapAllocation no_allocation;
tracing_cpu_profiler_.reset();
if (FLAG_stress_sampling_allocation_profiler > 0) {
......@@ -3210,9 +3211,7 @@ void Isolate::Deinit() {
main_thread_local_isolate_->heap()->FreeLinearAllocationArea();
if (shared_isolate_) {
DetachFromSharedIsolate();
}
DetachFromSharedIsolate();
heap_.TearDown();
......@@ -3659,12 +3658,6 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data,
date_cache_ = new DateCache();
heap_profiler_ = new HeapProfiler(heap());
interpreter_ = new interpreter::Interpreter(this);
if (OwnsStringTable()) {
string_table_ = std::make_shared<StringTable>(this);
} else {
DCHECK_NOT_NULL(shared_isolate_);
string_table_ = shared_isolate_->string_table_;
}
bigint_processor_ = bigint::Processor::New(new BigIntPlatform(this));
compiler_dispatcher_ = new LazyCompileDispatcher(
......@@ -3684,12 +3677,33 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data,
stack_guard()->InitThread(lock);
}
// Create LocalIsolate/LocalHeap for the main thread and set state to Running.
main_thread_local_isolate_.reset(new LocalIsolate(this, ThreadKind::kMain));
main_thread_local_heap()->Unpark();
// The main thread LocalHeap needs to be set up when attaching to the shared
// isolate. Otherwise a global safepoint would find an isolate without
// LocalHeaps and not wait until this thread is ready for a GC.
AttachToSharedIsolate();
// We need to ensure that we do not let a shared GC run before this isolate is
// fully set up.
DisallowSafepoints no_shared_gc;
// SetUp the object heap.
DCHECK(!heap_.HasBeenSetUp());
heap_.SetUp();
heap_.SetUp(main_thread_local_heap());
ReadOnlyHeap::SetUp(this, read_only_snapshot_data, can_rehash);
heap_.SetUpSpaces();
if (OwnsStringTable()) {
string_table_ = std::make_shared<StringTable>(this);
} else {
// Only refer to shared string table after attaching to the shared isolate.
DCHECK_NOT_NULL(shared_isolate());
string_table_ = shared_isolate()->string_table_;
}
if (V8_SHORT_BUILTIN_CALLS_BOOL && FLAG_short_builtin_calls) {
// Check if the system has more than 4GB of physical memory by comparing the
// old space size with respective threshold value.
......@@ -3714,14 +3728,6 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data,
}
}
// Create LocalIsolate/LocalHeap for the main thread and set state to Running.
main_thread_local_isolate_.reset(new LocalIsolate(this, ThreadKind::kMain));
main_thread_local_heap()->Unpark();
heap_.InitializeMainThreadLocalHeap(main_thread_local_heap());
if (shared_isolate_) heap()->InitSharedSpaces();
isolate_data_.external_reference_table()->Init(this);
#if V8_ENABLE_WEBASSEMBLY
......@@ -5163,19 +5169,30 @@ Address Isolate::store_to_stack_count_address(const char* function_name) {
return reinterpret_cast<Address>(&map[name].second);
}
void Isolate::AttachToSharedIsolate(Isolate* shared) {
DCHECK(shared->is_shared());
DCHECK_NULL(shared_isolate_);
DCHECK(!heap_.HasBeenSetUp());
shared->AppendAsClientIsolate(this);
shared_isolate_ = shared;
void Isolate::AttachToSharedIsolate() {
DCHECK(!attached_to_shared_isolate_);
if (shared_isolate_) {
DCHECK(shared_isolate_->is_shared());
shared_isolate_->AppendAsClientIsolate(this);
}
#if DEBUG
attached_to_shared_isolate_ = true;
#endif // DEBUG
}
void Isolate::DetachFromSharedIsolate() {
DCHECK_NOT_NULL(shared_isolate_);
shared_isolate_->RemoveAsClientIsolate(this);
shared_isolate_ = nullptr;
heap()->DeinitSharedSpaces();
DCHECK(attached_to_shared_isolate_);
if (shared_isolate_) {
shared_isolate_->RemoveAsClientIsolate(this);
shared_isolate_ = nullptr;
}
#if DEBUG
attached_to_shared_isolate_ = false;
#endif // DEBUG
}
void Isolate::AppendAsClientIsolate(Isolate* client) {
......
......@@ -1822,11 +1822,18 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
using IsDebugActive = HasAsyncEventDelegate::Next<bool, 1>;
};
bool is_shared() { return is_shared_; }
Isolate* shared_isolate() { return shared_isolate_; }
bool is_shared() const { return is_shared_; }
Isolate* shared_isolate() const {
DCHECK(attached_to_shared_isolate_);
return shared_isolate_;
}
void AttachToSharedIsolate(Isolate* shared);
void DetachFromSharedIsolate();
void set_shared_isolate(Isolate* shared_isolate) {
DCHECK(shared_isolate->is_shared());
DCHECK_NULL(shared_isolate_);
DCHECK(!attached_to_shared_isolate_);
shared_isolate_ = shared_isolate;
}
bool HasClientIsolates() const { return client_isolate_head_; }
......@@ -1962,6 +1969,12 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
// Returns the Exception sentinel.
Object ThrowInternal(Object exception, MessageLocation* location);
// These methods add/remove the isolate to/from the list of clients in the
// shared isolate. Isolates in the client list need to participate in a global
// safepoint.
void AttachToSharedIsolate();
void DetachFromSharedIsolate();
// Methods for appending and removing to/from client isolates list.
void AppendAsClientIsolate(Isolate* client);
void RemoveAsClientIsolate(Isolate* client);
......@@ -2257,6 +2270,13 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
// isolates or when no shared isolate is used.
Isolate* shared_isolate_ = nullptr;
#if DEBUG
// Set to true once during isolate initialization right when attaching to the
// shared isolate. If there was no shared isolate given it will still be set
// to true. After this point invocations of shared_isolate() are valid.
bool attached_to_shared_isolate_ = false;
#endif // DEBUG
// A shared isolate will use these two fields to track all its client
// isolates.
base::Mutex client_isolate_mutex_;
......
......@@ -2240,6 +2240,7 @@ size_t Heap::PerformGarbageCollection(
}
void Heap::CollectSharedGarbage(GarbageCollectionReason gc_reason) {
CHECK(deserialization_complete());
DCHECK(!IsShared());
DCHECK_NOT_NULL(isolate()->shared_isolate());
......@@ -2266,6 +2267,7 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
: IsolateSafepoint::StopMainThread::kYes;
client_heap->safepoint()->EnterSafepointScope(stop_main_thread);
DCHECK(client_heap->deserialization_complete());
client_heap->shared_old_allocator_->FreeLinearAllocationArea();
client_heap->shared_map_allocator_->FreeLinearAllocationArea();
......@@ -5503,7 +5505,10 @@ HeapObject Heap::AllocateRawWithRetryOrFailSlowPath(
FatalProcessOutOfMemory("CALL_AND_RETRY_LAST");
}
void Heap::SetUp() {
void Heap::SetUp(LocalHeap* main_thread_local_heap) {
DCHECK_NULL(main_thread_local_heap_);
main_thread_local_heap_ = main_thread_local_heap;
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
allocation_timeout_ = NextAllocationTimeout();
#endif
......@@ -5696,11 +5701,18 @@ void Heap::SetUpSpaces() {
}
write_protect_code_memory_ = FLAG_write_protect_code_memory;
}
void Heap::InitializeMainThreadLocalHeap(LocalHeap* main_thread_local_heap) {
DCHECK_NULL(main_thread_local_heap_);
main_thread_local_heap_ = main_thread_local_heap;
if (isolate()->shared_isolate()) {
shared_old_space_ = isolate()->shared_isolate()->heap()->old_space();
shared_old_allocator_.reset(
new ConcurrentAllocator(main_thread_local_heap(), shared_old_space_));
shared_map_space_ = isolate()->shared_isolate()->heap()->map_space();
shared_map_allocator_.reset(
new ConcurrentAllocator(main_thread_local_heap(), shared_map_space_));
}
main_thread_local_heap()->SetUpAllocators();
}
void Heap::InitializeHashSeed() {
......@@ -5983,6 +5995,12 @@ void Heap::TearDown() {
allocation_sites_to_pretenure_.reset();
shared_old_space_ = nullptr;
shared_old_allocator_.reset();
shared_map_space_ = nullptr;
shared_map_allocator_.reset();
for (int i = FIRST_MUTABLE_SPACE; i <= LAST_MUTABLE_SPACE; i++) {
delete space_[i];
space_[i] = nullptr;
......@@ -6004,24 +6022,6 @@ void Heap::TearDown() {
memory_allocator_.reset();
}
void Heap::InitSharedSpaces() {
shared_old_space_ = isolate()->shared_isolate()->heap()->old_space();
shared_old_allocator_.reset(
new ConcurrentAllocator(main_thread_local_heap(), shared_old_space_));
shared_map_space_ = isolate()->shared_isolate()->heap()->map_space();
shared_map_allocator_.reset(
new ConcurrentAllocator(main_thread_local_heap(), shared_map_space_));
}
void Heap::DeinitSharedSpaces() {
shared_old_space_ = nullptr;
shared_old_allocator_.reset();
shared_map_space_ = nullptr;
shared_map_allocator_.reset();
}
void Heap::AddGCPrologueCallback(v8::Isolate::GCCallbackWithData callback,
GCType gc_type, void* data) {
DCHECK_NOT_NULL(callback);
......
......@@ -840,7 +840,7 @@ class Heap {
void ConfigureHeapDefault();
// Prepares the heap, setting up for deserialization.
void SetUp();
void SetUp(LocalHeap* main_thread_local_heap);
// Sets read-only heap and space.
void SetUpFromReadOnlyHeap(ReadOnlyHeap* ro_heap);
......@@ -872,12 +872,6 @@ class Heap {
// Returns whether SetUp has been called.
bool HasBeenSetUp() const;
// Initialializes shared spaces.
void InitSharedSpaces();
// Removes shared spaces again.
void DeinitSharedSpaces();
// ===========================================================================
// Getters for spaces. =======================================================
// ===========================================================================
......
......@@ -13,6 +13,7 @@
#include "src/execution/isolate.h"
#include "src/handles/local-handles.h"
#include "src/heap/collection-barrier.h"
#include "src/heap/concurrent-allocator.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap-write-barrier.h"
......@@ -52,8 +53,7 @@ LocalHeap::LocalHeap(Heap* heap, ThreadKind kind,
next_(nullptr),
handles_(new LocalHandles),
persistent_handles_(std::move(persistent_handles)),
marking_barrier_(new MarkingBarrier(this)),
old_space_allocator_(this, heap->old_space()) {
marking_barrier_(new MarkingBarrier(this)) {
heap_->safepoint()->AddLocalHeap(this, [this] {
if (!is_main_thread()) {
WriteBarrier::SetForThread(marking_barrier_.get());
......@@ -69,6 +69,8 @@ LocalHeap::LocalHeap(Heap* heap, ThreadKind kind,
}
DCHECK_NULL(current_local_heap);
if (!is_main_thread()) current_local_heap = this;
SetUpAllocators();
}
LocalHeap::~LocalHeap() {
......@@ -76,7 +78,7 @@ LocalHeap::~LocalHeap() {
EnsureParkedBeforeDestruction();
heap_->safepoint()->RemoveLocalHeap(this, [this] {
old_space_allocator_.FreeLinearAllocationArea();
old_space_allocator_->FreeLinearAllocationArea();
if (!is_main_thread()) {
marking_barrier_->Publish();
......@@ -92,6 +94,19 @@ LocalHeap::~LocalHeap() {
DCHECK(gc_epilogue_callbacks_.empty());
}
void LocalHeap::SetUpAllocators() {
DCHECK_NULL(old_space_allocator_);
if (!heap_->old_space()) {
DCHECK(is_main_thread());
DCHECK(!heap_->deserialization_complete());
return;
}
old_space_allocator_ =
std::make_unique<ConcurrentAllocator>(this, heap_->old_space());
}
void LocalHeap::EnsurePersistentHandles() {
if (!persistent_handles_) {
persistent_handles_.reset(
......@@ -225,19 +240,19 @@ void LocalHeap::SafepointSlowPath() {
}
void LocalHeap::FreeLinearAllocationArea() {
old_space_allocator_.FreeLinearAllocationArea();
old_space_allocator_->FreeLinearAllocationArea();
}
void LocalHeap::MakeLinearAllocationAreaIterable() {
old_space_allocator_.MakeLinearAllocationAreaIterable();
old_space_allocator_->MakeLinearAllocationAreaIterable();
}
void LocalHeap::MarkLinearAllocationAreaBlack() {
old_space_allocator_.MarkLinearAllocationAreaBlack();
old_space_allocator_->MarkLinearAllocationAreaBlack();
}
void LocalHeap::UnmarkLinearAllocationArea() {
old_space_allocator_.UnmarkLinearAllocationArea();
old_space_allocator_->UnmarkLinearAllocationArea();
}
bool LocalHeap::TryPerformCollection() {
......
......@@ -93,7 +93,9 @@ class V8_EXPORT_PRIVATE LocalHeap {
Heap* AsHeap() { return heap(); }
MarkingBarrier* marking_barrier() { return marking_barrier_.get(); }
ConcurrentAllocator* old_space_allocator() { return &old_space_allocator_; }
ConcurrentAllocator* old_space_allocator() {
return old_space_allocator_.get();
}
// Mark/Unmark linear allocation areas black. Used for black allocation.
void MarkLinearAllocationAreaBlack();
......@@ -247,7 +249,7 @@ class V8_EXPORT_PRIVATE LocalHeap {
AllocationAlignment alignment);
void Park() {
DCHECK(AllowGarbageCollection::IsAllowed());
DCHECK(AllowSafepoints::IsAllowed());
ThreadState expected = ThreadState::Running();
if (!state_.CompareExchangeWeak(expected, ThreadState::Parked())) {
ParkSlowPath();
......@@ -255,7 +257,7 @@ class V8_EXPORT_PRIVATE LocalHeap {
}
void Unpark() {
DCHECK(AllowGarbageCollection::IsAllowed());
DCHECK(AllowSafepoints::IsAllowed());
ThreadState expected = ThreadState::Parked();
if (!state_.CompareExchangeWeak(expected, ThreadState::Running())) {
UnparkSlowPath();
......@@ -271,6 +273,8 @@ class V8_EXPORT_PRIVATE LocalHeap {
void InvokeGCEpilogueCallbacksInSafepoint();
void SetUpAllocators();
Heap* heap_;
bool is_main_thread_;
......@@ -288,7 +292,7 @@ class V8_EXPORT_PRIVATE LocalHeap {
std::vector<std::pair<GCEpilogueCallback*, void*>> gc_epilogue_callbacks_;
ConcurrentAllocator old_space_allocator_;
std::unique_ptr<ConcurrentAllocator> old_space_allocator_;
friend class CollectionBarrier;
friend class ConcurrentAllocator;
......
......@@ -39,6 +39,9 @@ void IsolateSafepoint::EnterSafepointScope(StopMainThread stop_main_thread) {
int running = 0;
// There needs to be at least one LocalHeap for the main thread.
DCHECK_NOT_NULL(local_heaps_head_);
for (LocalHeap* local_heap = local_heaps_head_; local_heap;
local_heap = local_heap->next_) {
if (local_heap->is_main_thread() &&
......
......@@ -159,15 +159,14 @@ class TestSerializer {
SnapshotData shared_space_snapshot(blobs.shared_space);
const bool kEnableSerializer = false;
const bool kGenerateHeap = false;
if (is_shared) CHECK_NULL(shared_isolate);
CHECK_IMPLIES(is_shared, !shared_isolate);
v8::Isolate* v8_isolate =
NewIsolate(kEnableSerializer, kGenerateHeap, is_shared);
v8::Isolate::Scope isolate_scope(v8_isolate);
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
if (shared_isolate) {
CHECK(!is_shared);
isolate->AttachToSharedIsolate(
reinterpret_cast<Isolate*>(shared_isolate));
isolate->set_shared_isolate(reinterpret_cast<Isolate*>(shared_isolate));
}
isolate->Init(&startup_snapshot, &read_only_snapshot,
&shared_space_snapshot, false);
......