Commit cb0e1242 authored by Dominik Inführ, committed by Commit Bot

[heap] Make Heap::gc_state_ relaxed atomic

Fix a data race between concurrent threads that allocate (and read
gc_state_ while doing so) and the main thread starting tear down. A
minimal sketch of the pattern follows the commit header below.

Bug: v8:10315
Change-Id: Icc24811e43268512c8d7fdaf92ecd3fc7b3ecd57
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2297390
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#68853}
parent a9059632
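
For context, the change applies the standard C++ treatment for a flag that one thread writes while other threads read it concurrently: a plain enum field makes those accesses a data race under the C++ memory model, whereas a std::atomic with relaxed ordering makes each individual access well-defined without adding any extra ordering or synchronization. The sketch below is a simplified, self-contained illustration of that pattern; the class and member names are stand-ins, not the actual V8 types.

// Minimal sketch (not the real V8 Heap class): a GC-state flag that a
// background allocating thread polls while the main thread tears down.
#include <atomic>
#include <thread>

enum HeapState { NOT_IN_GC, MARK_COMPACT, MINOR_MARK_COMPACT, TEAR_DOWN };

class SketchHeap {
 public:
  // Relaxed load: callers only need an atomic snapshot of the flag;
  // no ordering with other memory operations is required.
  HeapState gc_state() const {
    return gc_state_.load(std::memory_order_relaxed);
  }
  void SetGCState(HeapState state) {
    gc_state_.store(state, std::memory_order_relaxed);
  }
  bool IsTearingDown() const { return gc_state() == TEAR_DOWN; }

 private:
  // With a plain `HeapState gc_state_ = NOT_IN_GC;` member, the
  // concurrent read/write below would be a data race.
  std::atomic<HeapState> gc_state_{NOT_IN_GC};
};

int main() {
  SketchHeap heap;
  // The background thread stands in for a concurrent allocation path
  // that consults gc_state().
  std::thread background([&heap] {
    while (!heap.IsTearingDown()) {
      // Simulated allocation work.
    }
  });
  // The main thread starts tear down; the relaxed store eventually
  // becomes visible to the polling thread, and both accesses are
  // race-free.
  heap.SetGCState(TEAR_DOWN);
  background.join();
  return 0;
}

Relaxed ordering is enough for this kind of flag because it is not used to publish other data: readers only need to observe the state value itself (here mostly in DCHECKs and the tear-down check), so no acquire/release pairing is needed.
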
src/heap/heap-inl.h
@@ -189,7 +189,7 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
   DCHECK(AllowHeapAllocation::IsAllowed());
   DCHECK_IMPLIES(type == AllocationType::kCode,
                  alignment == AllocationAlignment::kCodeAligned);
-  DCHECK_EQ(gc_state_, NOT_IN_GC);
+  DCHECK_EQ(gc_state(), NOT_IN_GC);
 #ifdef V8_ENABLE_ALLOCATION_TIMEOUT
   if (FLAG_random_gc_interval > 0 || FLAG_gc_interval >= 0) {
     if (!always_allocate() && Heap::allocation_timeout_-- <= 0) {
@@ -281,7 +281,7 @@ HeapObject Heap::AllocateRawWith(int size, AllocationType allocation,
     DCHECK(!result.IsRetry());
     return result.ToObjectChecked();
   }
-  DCHECK_EQ(gc_state_, NOT_IN_GC);
+  DCHECK_EQ(gc_state(), NOT_IN_GC);
   Heap* heap = isolate()->heap();
   Address* top = heap->NewSpaceAllocationTopAddress();
   Address* limit = heap->NewSpaceAllocationLimitAddress();
@@ -414,7 +414,7 @@ bool Heap::InYoungGeneration(HeapObject heap_object) {
     // If the object is in the young generation, then it's not in RO_SPACE so
     // this is safe.
     Heap* heap = Heap::FromWritableHeapObject(heap_object);
-    DCHECK_IMPLIES(heap->gc_state_ == NOT_IN_GC, InToPage(heap_object));
+    DCHECK_IMPLIES(heap->gc_state() == NOT_IN_GC, InToPage(heap_object));
   }
 #endif
   return result;
src/heap/heap.cc
@@ -4,6 +4,7 @@
 #include "src/heap/heap.h"
 
+#include <atomic>
 #include <cinttypes>
 #include <iomanip>
 #include <memory>
@@ -449,7 +450,7 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
 }
 
 void Heap::SetGCState(HeapState state) {
-  gc_state_ = state;
+  gc_state_.store(state, std::memory_order_relaxed);
 }
 
 void Heap::PrintShortHeapStatistics() {
@@ -832,7 +833,7 @@ void Heap::GarbageCollectionPrologue() {
   UpdateMaximumCommitted();
 
 #ifdef DEBUG
-  DCHECK(!AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC);
+  DCHECK(!AllowHeapAllocation::IsAllowed() && gc_state() == NOT_IN_GC);
 
   if (FLAG_gc_verbose) Print();
 #endif  // DEBUG
@@ -5471,7 +5472,7 @@ void Heap::NotifyOldGenerationExpansion(AllocationSpace space,
 }
 
 void Heap::SetEmbedderHeapTracer(EmbedderHeapTracer* tracer) {
-  DCHECK_EQ(gc_state_, HeapState::NOT_IN_GC);
+  DCHECK_EQ(gc_state(), HeapState::NOT_IN_GC);
   local_embedder_heap_tracer()->SetRemoteTracer(tracer);
 }
@@ -5528,7 +5529,7 @@ void Heap::StartTearDown() {
 }
 
 void Heap::TearDown() {
-  DCHECK_EQ(gc_state_, TEAR_DOWN);
+  DCHECK_EQ(gc_state(), TEAR_DOWN);
 
   // It's too late for Heap::Verify() here, as parts of the Isolate are
   // already gone by the time this is called.
src/heap/heap.h
@@ -5,6 +5,7 @@
 #ifndef V8_HEAP_HEAP_H_
 #define V8_HEAP_HEAP_H_
 
+#include <atomic>
 #include <cmath>
 #include <map>
 #include <memory>
@@ -584,9 +585,11 @@ class Heap {
     return unprotected_memory_chunks_registry_enabled_;
   }
 
-  inline HeapState gc_state() { return gc_state_; }
+  inline HeapState gc_state() const {
+    return gc_state_.load(std::memory_order_relaxed);
+  }
   void SetGCState(HeapState state);
-  bool IsTearingDown() const { return gc_state_ == TEAR_DOWN; }
+  bool IsTearingDown() const { return gc_state() == TEAR_DOWN; }
 
   inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
@@ -2051,7 +2054,7 @@ class Heap {
   // Holds the number of open CodeSpaceMemoryModificationScopes.
   uintptr_t code_space_memory_modification_scope_depth_ = 0;
 
-  HeapState gc_state_ = NOT_IN_GC;
+  std::atomic<HeapState> gc_state_{NOT_IN_GC};
 
   int gc_post_processing_depth_ = 0;