Commit 99dbb750 authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Introduce new ThreadState with flags

Change the ThreadState representation from a fixed set of values to
either Parked or Running, plus two additional flags (bits) that are
set when either a collection or a safepoint is requested. Setting
either of these flags forces Park(), Unpark() and Safepoint() into
their slow path.

Currently we use the CollectionRequested flag on the main thread,
while SafepointRequested is used on background threads.

If the slow path sees the CollectionRequested flag, it performs a GC.
When it encounters the SafepointRequested flag, the background thread
participates in the safepoint protocol and parks itself for the
duration of the safepoint operation.
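
As a rough illustration of the encoding described above, the whole state fits into a single byte. This is a simplified sketch with illustrative constant names, not the exact declarations added by this CL (those live in the LocalHeap diff below):

```cpp
// Simplified sketch of the one-byte thread state described above. The real
// definitions added by this CL are the BitField-based ThreadState class in
// the LocalHeap declaration below; the k-prefixed names here are made up.
#include <cstdint>

constexpr uint8_t kParkedBit = 1 << 0;               // 0 = Running, 1 = Parked
constexpr uint8_t kSafepointRequestedBit = 1 << 1;   // used for background threads
constexpr uint8_t kCollectionRequestedBit = 1 << 2;  // used for the main thread

constexpr bool IsRunning(uint8_t state) { return (state & kParkedBit) == 0; }

// Running with any request flag set means Park(), Unpark() and Safepoint()
// must take their slow path.
constexpr bool IsRunningWithSlowPathFlag(uint8_t state) {
  return IsRunning(state) &&
         (state & (kSafepointRequestedBit | kCollectionRequestedBit)) != 0;
}
```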

This CL is a prerequisite for supporting safepoints across multiple
isolates. When safepointing multiple isolates, the main thread will
use both the CollectionRequested and SafepointRequested flags. This
isn't possible with the current system.

Design Doc: https://docs.google.com/document/d/1y6C9zAACEr0sBYMIYk3YpXosnkF3Ak4CEuWJu1-3zXs/edit?usp=sharing

Bug: v8:11708
Change-Id: I16b88740182d9c13bce54be163b334761529a5f0
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3211894
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77349}
parent 62418750
@@ -1322,12 +1322,13 @@ void Heap::GarbageCollectionEpilogueInSafepoint(GarbageCollector collector) {
ReduceNewSpaceSize();
}
// Set main thread state back to Running from CollectionRequested.
// Remove CollectionRequested flag from main thread state, as the collection
// was just performed.
safepoint()->AssertActive();
LocalHeap::ThreadState old_state =
main_thread_local_heap()->state_.exchange(LocalHeap::kRunning);
main_thread_local_heap()->state_.ClearCollectionRequested();
CHECK(old_state == LocalHeap::kRunning ||
old_state == LocalHeap::kSafepointRequested);
CHECK(old_state.IsRunning());
// Resume all threads waiting for the GC.
collection_barrier_->ResumeThreadsAwaitingCollection();
@@ -28,8 +28,7 @@ AllocationResult LocalHeap::AllocateRaw(int size_in_bytes, AllocationType type,
alignment == AllocationAlignment::kWordAligned);
Heap::HeapState state = heap()->gc_state();
DCHECK(state == Heap::TEAR_DOWN || state == Heap::NOT_IN_GC);
ThreadState current = state_.load(std::memory_order_relaxed);
DCHECK(current == kRunning || current == kSafepointRequested);
DCHECK(IsRunning());
#endif
// Each allocation is supposed to be a safepoint.
@@ -45,7 +45,7 @@ LocalHeap::LocalHeap(Heap* heap, ThreadKind kind,
std::unique_ptr<PersistentHandles> persistent_handles)
: heap_(heap),
is_main_thread_(kind == ThreadKind::kMain),
state_(kParked),
state_(ThreadState::Parked()),
allocation_failed_(false),
main_thread_parked_(false),
prev_(nullptr),
@@ -125,8 +125,7 @@ bool LocalHeap::IsHandleDereferenceAllowed() {
#ifdef DEBUG
VerifyCurrent();
#endif
ThreadState state = state_relaxed();
return state == kRunning || state == kSafepointRequested;
return IsRunning();
}
#endif
@@ -134,45 +133,60 @@ bool LocalHeap::IsParked() {
#ifdef DEBUG
VerifyCurrent();
#endif
ThreadState state = state_relaxed();
return state == kParked || state == kParkedSafepointRequested;
return state_.load_relaxed().IsParked();
}
void LocalHeap::ParkSlowPath(ThreadState current_state) {
if (is_main_thread()) {
while (true) {
CHECK_EQ(current_state, kSafepointRequested);
heap_->CollectGarbageForBackground(this);
bool LocalHeap::IsRunning() {
#ifdef DEBUG
VerifyCurrent();
#endif
return state_.load_relaxed().IsRunning();
}
current_state = kRunning;
if (state_.compare_exchange_strong(current_state, kParked)) {
return;
}
void LocalHeap::ParkSlowPath() {
while (true) {
ThreadState current_state = ThreadState::Running();
if (state_.CompareExchangeStrong(current_state, ThreadState::Parked()))
return;
// CAS above failed, so state is Running with some additional flag.
DCHECK(current_state.IsRunning());
if (is_main_thread()) {
DCHECK(current_state.IsCollectionRequested());
heap_->CollectGarbageForBackground(this);
} else {
DCHECK(current_state.IsSafepointRequested());
DCHECK(!current_state.IsCollectionRequested());
CHECK(state_.CompareExchangeStrong(current_state,
current_state.SetParked()));
heap_->safepoint()->NotifyPark();
return;
}
} else {
CHECK_EQ(current_state, kSafepointRequested);
CHECK(state_.compare_exchange_strong(current_state,
kParkedSafepointRequested));
heap_->safepoint()->NotifyPark();
}
}
void LocalHeap::UnparkSlowPath() {
if (is_main_thread()) {
ThreadState expected = kParkedSafepointRequested;
CHECK(state_.compare_exchange_strong(expected, kSafepointRequested));
heap_->CollectGarbageForBackground(this);
} else {
while (true) {
ThreadState expected = kParked;
if (!state_.compare_exchange_strong(expected, kRunning)) {
CHECK_EQ(expected, kParkedSafepointRequested);
TRACE_GC1(heap_->tracer(), GCTracer::Scope::BACKGROUND_UNPARK,
ThreadKind::kBackground);
heap_->safepoint()->WaitInUnpark();
} else {
return;
}
while (true) {
ThreadState current_state = ThreadState::Parked();
if (state_.CompareExchangeStrong(current_state, ThreadState::Running()))
return;
// CAS above failed, so state is Parked with some additional flag.
DCHECK(current_state.IsParked());
if (is_main_thread()) {
DCHECK(current_state.IsCollectionRequested());
CHECK(state_.CompareExchangeStrong(current_state,
current_state.SetRunning()));
heap_->CollectGarbageForBackground(this);
return;
} else {
DCHECK(current_state.IsSafepointRequested());
DCHECK(!current_state.IsCollectionRequested());
TRACE_GC1(heap_->tracer(), GCTracer::Scope::BACKGROUND_UNPARK,
ThreadKind::kBackground);
heap_->safepoint()->WaitInUnpark();
}
}
}
@@ -182,18 +196,30 @@ void LocalHeap::EnsureParkedBeforeDestruction() {
}
void LocalHeap::SafepointSlowPath() {
#ifdef DEBUG
ThreadState current_state = state_.load_relaxed();
DCHECK(current_state.IsRunning());
#endif
if (is_main_thread()) {
CHECK_EQ(kSafepointRequested, state_relaxed());
DCHECK(current_state.IsCollectionRequested());
heap_->CollectGarbageForBackground(this);
} else {
DCHECK(current_state.IsSafepointRequested());
DCHECK(!current_state.IsCollectionRequested());
TRACE_GC1(heap_->tracer(), GCTracer::Scope::BACKGROUND_SAFEPOINT,
ThreadKind::kBackground);
ThreadState expected = kSafepointRequested;
CHECK(state_.compare_exchange_strong(expected, kSafepoint));
// Parking the running thread here is an optimization. We do not need to
// wake this thread up to reach the next safepoint.
ThreadState old_state = state_.SetParked();
CHECK(old_state.IsRunning());
CHECK(old_state.IsSafepointRequested());
CHECK(!old_state.IsCollectionRequested());
heap_->safepoint()->WaitInSafepoint();
// This might be a bit surprising: IsolateSafepoint transitions the state
// from Safepoint (--> Running) --> Parked when returning from the
// safepoint.
Unpark();
}
}
@@ -219,36 +245,20 @@ bool LocalHeap::TryPerformCollection() {
heap_->CollectGarbageForBackground(this);
return true;
} else {
DCHECK(IsRunning());
heap_->collection_barrier_->RequestGC();
LocalHeap* main_thread = heap_->main_thread_local_heap();
ThreadState current = main_thread->state_relaxed();
while (true) {
switch (current) {
case kRunning:
if (main_thread->state_.compare_exchange_strong(
current, kSafepointRequested)) {
return heap_->collection_barrier_->AwaitCollectionBackground(this);
}
break;
case kSafepointRequested:
return heap_->collection_barrier_->AwaitCollectionBackground(this);
case kParked:
if (main_thread->state_.compare_exchange_strong(
current, kParkedSafepointRequested)) {
return false;
}
break;
case kParkedSafepointRequested:
return false;
case kSafepoint:
UNREACHABLE();
}
const ThreadState old_state = main_thread->state_.SetCollectionRequested();
if (old_state.IsRunning()) {
const bool performed_gc =
heap_->collection_barrier_->AwaitCollectionBackground(this);
return performed_gc;
} else {
DCHECK(old_state.IsParked());
return false;
}
}
}
@@ -45,11 +45,9 @@ class V8_EXPORT_PRIVATE LocalHeap {
// from the main thread.
void Safepoint() {
DCHECK(AllowSafepoints::IsAllowed());
ThreadState current = state_relaxed();
ThreadState current = state_.load_relaxed();
// The following condition checks for both kSafepointRequested (background
// thread) and kCollectionRequested (main thread).
if (V8_UNLIKELY(current == kSafepointRequested)) {
if (V8_UNLIKELY(current.IsRunningWithSlowPathFlag())) {
SafepointSlowPath();
}
}
@@ -89,6 +87,7 @@ class V8_EXPORT_PRIVATE LocalHeap {
#endif
bool IsParked();
bool IsRunning();
Heap* heap() { return heap_; }
Heap* AsHeap() { return heap(); }
@@ -151,26 +150,94 @@ class V8_EXPORT_PRIVATE LocalHeap {
void RemoveGCEpilogueCallback(GCEpilogueCallback* callback, void* data);
private:
enum ThreadState {
// Threads in this state are allowed to access the heap.
kRunning,
// Thread was parked, which means that the thread is not allowed to access
// or manipulate the heap in any way. This is considered to be a safepoint.
kParked,
// SafepointRequested is used for Running threads to force Safepoint() and
// Park() into the slow path.
kSafepointRequested,
// A thread transitions into this state from SafepointRequested when it
// enters a safepoint.
kSafepoint,
// This state is used for Parked background threads and forces Unpark() into
// the slow path. It prevents Unpark() to succeed before the safepoint
// operation is finished.
kParkedSafepointRequested,
using ParkedBit = base::BitField8<bool, 0, 1>;
using SafepointRequestedBit = ParkedBit::Next<bool, 1>;
using CollectionRequestedBit = SafepointRequestedBit::Next<bool, 1>;
class ThreadState final {
public:
static constexpr ThreadState Parked() {
return ThreadState(ParkedBit::kMask);
}
static constexpr ThreadState Running() { return ThreadState(0); }
constexpr bool IsRunning() const { return !ParkedBit::decode(raw_state_); }
constexpr ThreadState SetRunning() const V8_WARN_UNUSED_RESULT {
return ThreadState(raw_state_ & ~ParkedBit::kMask);
}
constexpr bool IsParked() const { return ParkedBit::decode(raw_state_); }
constexpr ThreadState SetParked() const V8_WARN_UNUSED_RESULT {
return ThreadState(ParkedBit::kMask | raw_state_);
}
constexpr bool IsSafepointRequested() const {
return SafepointRequestedBit::decode(raw_state_);
}
constexpr bool IsCollectionRequested() const {
return CollectionRequestedBit::decode(raw_state_);
}
constexpr bool IsRunningWithSlowPathFlag() const {
return IsRunning() && (raw_state_ & (SafepointRequestedBit::kMask |
CollectionRequestedBit::kMask));
}
private:
constexpr explicit ThreadState(uint8_t value) : raw_state_(value) {}
uint8_t raw() const { return raw_state_; }
uint8_t raw_state_;
friend class LocalHeap;
};
ThreadState state_relaxed() { return state_.load(std::memory_order_relaxed); }
class AtomicThreadState final {
public:
constexpr explicit AtomicThreadState(ThreadState state)
: raw_state_(state.raw()) {}
bool CompareExchangeStrong(ThreadState& expected, ThreadState updated) {
return raw_state_.compare_exchange_strong(expected.raw_state_,
updated.raw());
}
bool CompareExchangeWeak(ThreadState& expected, ThreadState updated) {
return raw_state_.compare_exchange_weak(expected.raw_state_,
updated.raw());
}
ThreadState SetParked() {
return ThreadState(raw_state_.fetch_or(ParkedBit::kMask));
}
ThreadState SetSafepointRequested() {
return ThreadState(raw_state_.fetch_or(SafepointRequestedBit::kMask));
}
ThreadState ClearSafepointRequested() {
return ThreadState(raw_state_.fetch_and(~SafepointRequestedBit::kMask));
}
ThreadState SetCollectionRequested() {
return ThreadState(raw_state_.fetch_or(CollectionRequestedBit::kMask));
}
ThreadState ClearCollectionRequested() {
return ThreadState(raw_state_.fetch_and(~CollectionRequestedBit::kMask));
}
ThreadState load_relaxed() const {
return ThreadState(raw_state_.load(std::memory_order_relaxed));
}
private:
std::atomic<uint8_t> raw_state_;
};
// Slow path of allocation that performs GC and then retries allocation in
// loop.
@@ -181,21 +248,21 @@ class V8_EXPORT_PRIVATE LocalHeap {
void Park() {
DCHECK(AllowGarbageCollection::IsAllowed());
ThreadState expected = kRunning;
if (!state_.compare_exchange_strong(expected, kParked)) {
ParkSlowPath(expected);
ThreadState expected = ThreadState::Running();
if (!state_.CompareExchangeWeak(expected, ThreadState::Parked())) {
ParkSlowPath();
}
}
void Unpark() {
DCHECK(AllowGarbageCollection::IsAllowed());
ThreadState expected = kParked;
if (!state_.compare_exchange_strong(expected, kRunning)) {
ThreadState expected = ThreadState::Parked();
if (!state_.CompareExchangeWeak(expected, ThreadState::Running())) {
UnparkSlowPath();
}
}
void ParkSlowPath(ThreadState state);
void ParkSlowPath();
void UnparkSlowPath();
void EnsureParkedBeforeDestruction();
void SafepointSlowPath();
@@ -207,7 +274,7 @@ class V8_EXPORT_PRIVATE LocalHeap {
Heap* heap_;
bool is_main_thread_;
std::atomic<ThreadState> state_;
AtomicThreadState state_;
bool allocation_failed_;
bool main_thread_parked_;
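
To make the fast-path/slow-path split above easier to follow in isolation, here is a standalone model using plain std::atomic rather than the AtomicThreadState helper from the diff; it is a sketch under that assumption, with the slow paths stubbed out. As long as no request flag is set, Park() and Unpark() succeed with a single compare-and-swap; any pending flag makes the CAS fail and diverts into the slow path.

```cpp
// Standalone model of the Park()/Unpark() fast path from the LocalHeap
// declaration above. Sketch only: the slow paths are stubs and the bit
// constant mirrors, but is not, the real ParkedBit BitField definition.
#include <atomic>
#include <cstdint>

constexpr uint8_t kParkedBit = 1 << 0;

class ThreadStateModel {
 public:
  void Park() {
    uint8_t expected = 0;  // plain Running, no request flags
    if (!state_.compare_exchange_weak(expected, kParkedBit)) {
      ParkSlowPath();  // a collection or safepoint was requested
    }
  }

  void Unpark() {
    uint8_t expected = kParkedBit;  // plain Parked, no request flags
    if (!state_.compare_exchange_weak(expected, uint8_t{0})) {
      UnparkSlowPath();  // wait for / help with the pending operation
    }
  }

 private:
  void ParkSlowPath() { /* perform the requested GC or notify the safepoint */ }
  void UnparkSlowPath() { /* wait until the safepoint operation has finished */ }

  std::atomic<uint8_t> state_{kParkedBit};  // threads start out parked
};
```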
@@ -46,23 +46,13 @@ void IsolateSafepoint::EnterSafepointScope(StopMainThread stop_main_thread) {
continue;
}
LocalHeap::ThreadState expected = local_heap->state_relaxed();
while (true) {
CHECK(expected == LocalHeap::kParked || expected == LocalHeap::kRunning);
LocalHeap::ThreadState new_state =
expected == LocalHeap::kParked ? LocalHeap::kParkedSafepointRequested
: LocalHeap::kSafepointRequested;
if (local_heap->state_.compare_exchange_strong(expected, new_state)) {
if (expected == LocalHeap::kRunning) {
running++;
} else {
CHECK_EQ(expected, LocalHeap::kParked);
}
break;
}
}
const LocalHeap::ThreadState old_state =
local_heap->state_.SetSafepointRequested();
if (old_state.IsRunning()) running++;
CHECK_IMPLIES(old_state.IsCollectionRequested(),
local_heap->is_main_thread());
CHECK(!old_state.IsSafepointRequested());
}
barrier_.WaitUntilRunningThreadsInSafepoint(running);
@@ -83,17 +73,13 @@ void IsolateSafepoint::LeaveSafepointScope(StopMainThread stop_main_thread) {
continue;
}
// We transition both ParkedSafepointRequested and Safepoint states to
// Parked. While this is probably intuitive for ParkedSafepointRequested,
// this might be surprising for Safepoint though. SafepointSlowPath() will
// later unpark that thread again. Going through Parked means that a
// background thread doesn't need to be waked up before the main thread can
// start the next safepoint.
LocalHeap::ThreadState old_state =
local_heap->state_.exchange(LocalHeap::kParked);
CHECK(old_state == LocalHeap::kParkedSafepointRequested ||
old_state == LocalHeap::kSafepoint);
const LocalHeap::ThreadState old_state =
local_heap->state_.ClearSafepointRequested();
CHECK(old_state.IsParked());
CHECK(old_state.IsSafepointRequested());
CHECK_IMPLIES(old_state.IsCollectionRequested(),
local_heap->is_main_thread());
}
barrier_.Disarm();
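
The coordinator side shown in the last hunk reduces to one atomic read-modify-write per thread when arming and disarming the safepoint. The following is a hypothetical standalone sketch, not the real IsolateSafepoint code; the function names and the plain vector of atomics are made up for illustration.

```cpp
// Sketch of the arm/disarm logic from the IsolateSafepoint hunk above:
// entering the scope sets SafepointRequested on every thread with fetch_or
// and counts the threads that were Running; leaving the scope clears the
// flag with fetch_and.
#include <atomic>
#include <cstdint>
#include <vector>

constexpr uint8_t kParkedBit = 1 << 0;
constexpr uint8_t kSafepointRequestedBit = 1 << 1;

// Returns how many threads were Running and therefore still have to reach a
// safepoint; the barrier then waits for exactly that many.
int ArmSafepoint(std::vector<std::atomic<uint8_t>>& thread_states) {
  int running = 0;
  for (std::atomic<uint8_t>& state : thread_states) {
    uint8_t old_state = state.fetch_or(kSafepointRequestedBit);
    if ((old_state & kParkedBit) == 0) running++;  // thread was Running
  }
  return running;
}

// By the time the scope is left, every thread has parked itself, so clearing
// the request flag leaves each thread in the plain Parked state.
void DisarmSafepoint(std::vector<std::atomic<uint8_t>>& thread_states) {
  for (std::atomic<uint8_t>& state : thread_states) {
    state.fetch_and(static_cast<uint8_t>(~kSafepointRequestedBit));
  }
}
```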