Commit 847f6d9a authored by Michael Lippautz, committed by V8 LUCI CQ

heap: Fix TSAN race when setting a flag after page initialization

HAS_PROGRESS_BAR is set after page initialization, at which point all
flags are assumed to be immutable while a GC is running.

Separating out the progress bar from flags allows setting it lazily at
allocation time.

Bug: v8:11915
Change-Id: I48a877e0e80d583d7a0fadef2546fc70417806e7
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3085268
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/main@{#76382}
parent 975af4d1
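
For readers outside V8: the commit replaces the HAS_PROGRESS_BAR page flag with a dedicated ProgressBar object stored in the page header (the full definition is in the new src/heap/progress-bar.h further down). The sketch below is not code from this commit and the name LazyProgressBar is made up; it condenses the idea that enabling the bar lazily at allocation time only touches its own atomic field, instead of rewriting the page-flags word that the GC treats as immutable after initialization.

// Illustrative sketch only -- not V8 code. A dedicated atomic field with a
// "disabled" sentinel can be enabled lazily without touching any other page
// metadata after initialization.
#include <atomic>
#include <cstddef>
#include <cstdint>

class LazyProgressBar {
 public:
  void Initialize() { value_.store(kDisabled, std::memory_order_relaxed); }
  // Called lazily, e.g. when a large array is allocated.
  void Enable() { value_.store(0, std::memory_order_release); }
  bool IsEnabled() const {
    return value_.load(std::memory_order_acquire) != kDisabled;
  }

 private:
  static constexpr size_t kDisabled = SIZE_MAX;
  std::atomic<size_t> value_;
};
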
@@ -2774,6 +2774,7 @@ v8_header_set("v8_internal_headers") {
"src/heap/paged-spaces.h",
"src/heap/parallel-work-item.h",
"src/heap/parked-scope.h",
"src/heap/progress-bar.h",
"src/heap/read-only-heap-inl.h",
"src/heap/read-only-heap.h",
"src/heap/read-only-spaces.h",
......
@@ -44,12 +44,6 @@ class BasicMemoryChunk {
EVACUATION_CANDIDATE = 1u << 6,
NEVER_EVACUATE = 1u << 7,
// Large objects can have a progress bar in their page header. These object
// are scanned in increments and will be kept black while being scanned.
// Even if the mutator writes to them they will be kept black and a white
// to grey transition is performed in the value.
HAS_PROGRESS_BAR = 1u << 8,
// |PAGE_NEW_OLD_PROMOTION|: A page tagged with this flag has been promoted
// from new to old space during evacuation.
PAGE_NEW_OLD_PROMOTION = 1u << 9,
......
@@ -809,8 +809,7 @@ HeapObject FactoryBase<Impl>::AllocateRawArray(int size,
(size >
isolate()->heap()->AsHeap()->MaxRegularHeapObjectSize(allocation)) &&
FLAG_use_marking_progress_bar) {
BasicMemoryChunk* chunk = BasicMemoryChunk::FromHeapObject(result);
chunk->SetFlag<AccessMode::ATOMIC>(MemoryChunk::HAS_PROGRESS_BAR);
LargePage::FromHeapObject(result)->ProgressBar().Enable();
}
return result;
}
......
@@ -513,8 +513,7 @@ MaybeHandle<FixedArray> Factory::TryNewFixedArray(
if (!allocation.To(&result)) return MaybeHandle<FixedArray>();
if ((size > heap->MaxRegularHeapObjectSize(allocation_type)) &&
FLAG_use_marking_progress_bar) {
BasicMemoryChunk* chunk = BasicMemoryChunk::FromHeapObject(result);
chunk->SetFlag<AccessMode::ATOMIC>(MemoryChunk::HAS_PROGRESS_BAR);
LargePage::FromHeapObject(result)->ProgressBar().Enable();
}
DisallowGarbageCollection no_gc;
result.set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
......
@@ -230,7 +230,7 @@ void OldLargeObjectSpace::ClearMarkingStateOfLiveObjects() {
Marking::MarkWhite(marking_state->MarkBitFrom(obj));
MemoryChunk* chunk = MemoryChunk::FromHeapObject(obj);
RememberedSet<OLD_TO_NEW>::FreeEmptyBuckets(chunk);
chunk->ResetProgressBar();
chunk->ProgressBar().ResetIfEnabled();
marking_state->SetLiveBytes(chunk, 0);
}
DCHECK(marking_state->IsWhite(obj));
......
@@ -1707,9 +1707,8 @@ void MarkCompactCollector::VisitObject(HeapObject obj) {
void MarkCompactCollector::RevisitObject(HeapObject obj) {
DCHECK(marking_state()->IsBlack(obj));
DCHECK_IMPLIES(MemoryChunk::FromHeapObject(obj)->IsFlagSet(
MemoryChunk::HAS_PROGRESS_BAR),
0u == MemoryChunk::FromHeapObject(obj)->ProgressBar());
DCHECK_IMPLIES(MemoryChunk::FromHeapObject(obj)->ProgressBar().IsEnabled(),
0u == MemoryChunk::FromHeapObject(obj)->ProgressBar().Value());
MarkingVisitor::RevisitScope revisit(marking_visitor_.get());
marking_visitor_->Visit(obj.map(), obj);
}
......
@@ -8,6 +8,7 @@
#include "src/heap/marking-visitor.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/progress-bar.h"
#include "src/heap/spaces.h"
#include "src/objects/objects.h"
#include "src/objects/smi.h"
@@ -206,13 +207,13 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitSharedFunctionInfo(
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::
VisitFixedArrayWithProgressBar(Map map, FixedArray object,
MemoryChunk* chunk) {
ProgressBar& progress_bar) {
const int kProgressBarScanningChunk = kMaxRegularHeapObjectSize;
STATIC_ASSERT(kMaxRegularHeapObjectSize % kTaggedSize == 0);
DCHECK(concrete_visitor()->marking_state()->IsBlackOrGrey(object));
concrete_visitor()->marking_state()->GreyToBlack(object);
int size = FixedArray::BodyDescriptor::SizeOf(map, object);
size_t current_progress_bar = chunk->ProgressBar();
size_t current_progress_bar = progress_bar.Value();
int start = static_cast<int>(current_progress_bar);
if (start == 0) {
this->VisitMapPointer(object);
@@ -221,7 +222,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::
int end = std::min(size, start + kProgressBarScanningChunk);
if (start < end) {
VisitPointers(object, object.RawField(start), object.RawField(end));
bool success = chunk->TrySetProgressBar(current_progress_bar, end);
bool success = progress_bar.TrySetNewValue(current_progress_bar, end);
CHECK(success);
if (end < size) {
// The object can be pushed back onto the marking worklist only after
@@ -237,9 +238,10 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitFixedArray(
Map map, FixedArray object) {
// Arrays with the progress bar are not left-trimmable because they reside
// in the large object space.
MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
return chunk->IsFlagSet<AccessMode::ATOMIC>(MemoryChunk::HAS_PROGRESS_BAR)
? VisitFixedArrayWithProgressBar(map, object, chunk)
ProgressBar& progress_bar =
MemoryChunk::FromHeapObject(object)->ProgressBar();
return progress_bar.IsEnabled()
? VisitFixedArrayWithProgressBar(map, object, progress_bar)
: concrete_visitor()->VisitLeftTrimmableArray(map, object);
}
......
@@ -193,7 +193,7 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
template <typename T>
int VisitEmbedderTracingSubclass(Map map, T object);
V8_INLINE int VisitFixedArrayWithProgressBar(Map map, FixedArray object,
MemoryChunk* chunk);
ProgressBar& progress_bar);
// Marks the descriptor array black without pushing it on the marking work
// list and visits its header. Returns the size of the descriptor array
// if it was successully marked as black.
......
@@ -7,6 +7,7 @@
#include "src/heap/heap.h"
#include "src/heap/list.h"
#include "src/heap/progress-bar.h"
#include "src/heap/slot-set.h"
#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
@@ -50,7 +51,7 @@ class V8_EXPORT_PRIVATE MemoryChunkLayout {
FIELD(VirtualMemory, Reservation),
// MemoryChunk fields:
FIELD(SlotSet* [kNumSets], SlotSet),
FIELD(std::atomic<size_t>, ProgressBar),
FIELD(ProgressBar, ProgressBar),
FIELD(std::atomic<intptr_t>, LiveByteCount),
FIELD(SlotSet*, SweepingSlotSet),
FIELD(TypedSlotsSet* [kNumSets], TypedSlotSet),
......
@@ -130,7 +130,7 @@ MemoryChunk* MemoryChunk::Initialize(BasicMemoryChunk* basic_chunk, Heap* heap,
// Not actually used but initialize anyway for predictability.
chunk->invalidated_slots_[OLD_TO_CODE] = nullptr;
}
chunk->progress_bar_ = 0;
chunk->progress_bar_.Initialize();
chunk->set_concurrent_sweeping_state(ConcurrentSweepingState::kDone);
chunk->page_protection_change_mutex_ = new base::Mutex();
chunk->write_unprotect_counter_ = 0;
......
@@ -162,22 +162,10 @@ class MemoryChunk : public BasicMemoryChunk {
// Approximate amount of physical memory committed for this chunk.
V8_EXPORT_PRIVATE size_t CommittedPhysicalMemory();
size_t ProgressBar() {
DCHECK(IsFlagSet<AccessMode::ATOMIC>(HAS_PROGRESS_BAR));
return progress_bar_.load(std::memory_order_acquire);
}
bool TrySetProgressBar(size_t old_value, size_t new_value) {
DCHECK(IsFlagSet<AccessMode::ATOMIC>(HAS_PROGRESS_BAR));
return progress_bar_.compare_exchange_strong(old_value, new_value,
std::memory_order_acq_rel);
}
void ResetProgressBar() {
if (IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
progress_bar_.store(0, std::memory_order_release);
}
}
class ProgressBar& ProgressBar() { return progress_bar_; }
const class ProgressBar& ProgressBar() const { return progress_bar_; }
inline void IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
size_t amount);
@@ -256,9 +244,9 @@ class MemoryChunk : public BasicMemoryChunk {
// is ceil(size() / kPageSize).
SlotSet* slot_set_[NUMBER_OF_REMEMBERED_SET_TYPES];
// Used by the incremental marker to keep track of the scanning progress in
// large objects that have a progress bar and are scanned in increments.
std::atomic<size_t> progress_bar_;
// Used by the marker to keep track of the scanning progress in large objects
// that have a progress bar and are scanned in increments.
class ProgressBar progress_bar_;
// Count of bytes marked black on page.
std::atomic<intptr_t> live_byte_count_;
......
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_PROGRESS_BAR_H_
#define V8_HEAP_PROGRESS_BAR_H_
#include <atomic>
#include <cstdint>
#include "src/base/logging.h"
namespace v8 {
namespace internal {
// The progress bar allows for keeping track of the bytes processed of a single
// object. The progress bar itself must be enabled before it's used.
//
// Only large objects use the progress bar which is stored in their page header.
// These objects are scanned in increments and will be kept black while being
// scanned. Even if the mutator writes to them they will be kept black and a
// white to grey transition is performed in the value.
//
// The progress bar starts as disabled. After enabling (through `Enable()`), it
// can never be disabled again.
class ProgressBar final {
public:
void Initialize() { value_ = kDisabledSentinel; }
void Enable() { value_ = 0; }
bool IsEnabled() const {
return value_.load(std::memory_order_acquire) != kDisabledSentinel;
}
size_t Value() const {
DCHECK(IsEnabled());
return value_.load(std::memory_order_acquire);
}
bool TrySetNewValue(size_t old_value, size_t new_value) {
DCHECK(IsEnabled());
DCHECK_NE(kDisabledSentinel, new_value);
return value_.compare_exchange_strong(old_value, new_value,
std::memory_order_acq_rel);
}
void ResetIfEnabled() {
if (IsEnabled()) {
value_.store(0, std::memory_order_release);
}
}
private:
static constexpr size_t kDisabledSentinel = SIZE_MAX;
std::atomic<size_t> value_;
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_PROGRESS_BAR_H_
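
To show how the new header is meant to be used, here is a hypothetical caller — not code from this commit — that mirrors the chunked scanning pattern of VisitFixedArrayWithProgressBar above: read the current offset, visit a bounded range, and publish the new offset with a compare-and-swap. ScanOneIncrement and its parameters are invented for illustration.

// Hypothetical usage sketch of the ProgressBar API; helper names are made up.
#include <algorithm>
#include <cstddef>
#include "src/heap/progress-bar.h"

using v8::internal::ProgressBar;

// Scans one bounded chunk of a large object and returns the new offset.
size_t ScanOneIncrement(ProgressBar& progress_bar, size_t object_size,
                        size_t step_size) {
  if (!progress_bar.IsEnabled()) progress_bar.Enable();  // V8 enables it at allocation time
  const size_t start = progress_bar.Value();
  const size_t end = std::min(object_size, start + step_size);
  if (start < end) {
    // ... visit the slots in [start, end) here ...
    bool success = progress_bar.TrySetNewValue(start, end);
    (void)success;  // the marking visitor CHECKs that this succeeded
  }
  return end;  // the caller re-enqueues the object while end < object_size
}
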
@@ -5685,8 +5685,9 @@ TEST(Regress598319) {
marking->Step(kSmallStepSizeInMs,
i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
StepOrigin::kV8);
if (page->IsFlagSet(Page::HAS_PROGRESS_BAR) && page->ProgressBar() > 0) {
CHECK_NE(page->ProgressBar(), arr.get().Size());
ProgressBar& progress_bar = page->ProgressBar();
if (progress_bar.IsEnabled() && progress_bar.Value() > 0) {
CHECK_NE(progress_bar.Value(), arr.get().Size());
{
// Shift by 1, effectively moving one white object across the progress
// bar, meaning that we will miss marking it.
......
@@ -327,6 +327,7 @@ v8_source_set("unittests_sources") {
"heap/memory-reducer-unittest.cc",
"heap/object-stats-unittest.cc",
"heap/persistent-handles-unittest.cc",
"heap/progressbar-unittest.cc",
"heap/safepoint-unittest.cc",
"heap/slot-set-unittest.cc",
"heap/spaces-unittest.cc",
......
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/progress-bar.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace v8 {
namespace internal {
TEST(ProgressBar, DefaultDisabled) {
ProgressBar progress_bar;
progress_bar.Initialize();
EXPECT_FALSE(progress_bar.IsEnabled());
}
TEST(ProgressBar, EnabledAfterExplicitEnable) {
ProgressBar progress_bar;
progress_bar.Initialize();
progress_bar.Enable();
EXPECT_TRUE(progress_bar.IsEnabled());
}
TEST(ProgressBar, ZeroValueAfterEnable) {
ProgressBar progress_bar;
progress_bar.Initialize();
progress_bar.Enable();
ASSERT_TRUE(progress_bar.IsEnabled());
EXPECT_EQ(0u, progress_bar.Value());
}
TEST(ProgressBar, TrySetValue) {
ProgressBar progress_bar;
progress_bar.Initialize();
progress_bar.Enable();
ASSERT_TRUE(progress_bar.IsEnabled());
EXPECT_TRUE(progress_bar.TrySetNewValue(0, 17));
EXPECT_EQ(17u, progress_bar.Value());
}
TEST(ProgressBar, MultipleTrySetValue) {
ProgressBar progress_bar;
progress_bar.Initialize();
progress_bar.Enable();
ASSERT_TRUE(progress_bar.IsEnabled());
EXPECT_TRUE(progress_bar.TrySetNewValue(0, 23));
EXPECT_EQ(23u, progress_bar.Value());
EXPECT_TRUE(progress_bar.TrySetNewValue(23, 127));
EXPECT_EQ(127u, progress_bar.Value());
}
TEST(ProgressBar, ResetIfEnabledOnDisabled) {
ProgressBar progress_bar;
progress_bar.Initialize();
progress_bar.ResetIfEnabled();
EXPECT_FALSE(progress_bar.IsEnabled());
}
TEST(ProgressBar, ResetIfEnabledOnEnabled) {
ProgressBar progress_bar;
progress_bar.Initialize();
progress_bar.Enable();
ASSERT_TRUE(progress_bar.TrySetNewValue(0, 1));
progress_bar.ResetIfEnabled();
ASSERT_TRUE(progress_bar.IsEnabled());
EXPECT_EQ(0u, progress_bar.Value());
}
#ifdef DEBUG
TEST(ProgressBarDeathTest, DiesOnTrySetValueOnDisabled) {
ProgressBar progress_bar;
progress_bar.Initialize();
EXPECT_DEATH_IF_SUPPORTED(progress_bar.TrySetNewValue(0, 1), "");
}
#endif // DEBUG
} // namespace internal
} // namespace v8