Commit 5c92b06e authored by Anton Bikineev, committed by V8 LUCI CQ

cppgc: young-gen: Fix compilation and tests with cppgc_enable_young_gen

This CL prepares the sources and the tests for enabling
cppgc_enable_young_generation by default. The static initializer
in YoungGenerationEnabler (due to v8::base::Mutex) is changed to be lazy.
The tests now check the runtime flag.

Bug: chromium:1029379
Change-Id: I1497a3dd2b8d62c1acd48496821f07324b7944d5
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3616726
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Auto-Submit: Anton Bikineev <bikineev@chromium.org>
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80304}
parent d489e88c
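
For context, the core change swaps static data members (whose v8::base::Mutex required a static initializer) for a lazily constructed, never-destroyed singleton. A minimal self-contained sketch of that pattern, with std::mutex and a simplified LeakyObject standing in for the v8::base types:

#include <cstddef>
#include <mutex>
#include <new>
#include <utility>

// Simplified stand-in for v8::base::LeakyObject: constructs T in place in
// static storage and intentionally never runs T's destructor.
template <typename T>
class LeakyObject {
 public:
  template <typename... Args>
  explicit LeakyObject(Args&&... args) {
    new (&storage_) T(std::forward<Args>(args)...);
  }
  T* get() { return reinterpret_cast<T*>(&storage_); }

 private:
  alignas(T) unsigned char storage_[sizeof(T)];
};

class YoungGenerationEnabler {
 public:
  static YoungGenerationEnabler& Instance() {
    // Function-local static: constructed thread-safely on first use, so no
    // static initializer runs at program startup.
    static LeakyObject<YoungGenerationEnabler> instance;
    return *instance.get();
  }

 private:
  template <typename U>
  friend class LeakyObject;  // lets LeakyObject reach the private constructor

  YoungGenerationEnabler() = default;

  std::size_t is_enabled_ = 0;
  std::mutex mutex_;
};

Enable(), Disable() and IsEnabled() then become operations reached through Instance(), as the hunks below show for the real class.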
@@ -14,6 +14,10 @@
 #include "cppgc/platform.h"
 #include "v8config.h"  // NOLINT(build/include_directory)
+#if __cpp_lib_bitopts
+#include <bit>
+#endif  // __cpp_lib_bitopts
 namespace cppgc {
 namespace internal {
@@ -57,7 +61,15 @@ class V8_EXPORT AgeTable final {
  private:
   V8_INLINE size_t card(uintptr_t offset) const {
     constexpr size_t kGranularityBits =
+#if __cpp_lib_bitopts
+        std::countr_zero(static_cast<uint32_t>(kCardSizeInBytes));
+#elif V8_HAS_BUILTIN_CTZ
         __builtin_ctz(static_cast<uint32_t>(kCardSizeInBytes));
+#else   //! V8_HAS_BUILTIN_CTZ
+        // Hardcode and check with assert.
+        9;
+#endif  // !V8_HAS_BUILTIN_CTZ
+    static_assert((1 << kGranularityBits) == kCardSizeInBytes);
     const size_t entry = offset >> kGranularityBits;
     CPPGC_DCHECK(table_.size() > entry);
     return entry;
...
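
The new fallback chain computes log2 of the card size at compile time in three ways and cross-checks the result. A standalone sketch of the same idea (the 512-byte card size is inferred from the hardcoded 9 above; note that the standard feature-test macro from <version> is spelled __cpp_lib_bitops):

#include <cstdint>
#include <version>
#if __cpp_lib_bitops
#include <bit>
#endif

constexpr uint32_t kCardSizeInBytes = 512;  // 1 << 9, a power of two

constexpr int GranularityBits() {
#if __cpp_lib_bitops
  // C++20: trailing zero count == log2 for a power of two.
  return std::countr_zero(kCardSizeInBytes);
#elif defined(__GNUC__) || defined(__clang__)
  return __builtin_ctz(kCardSizeInBytes);  // compiler builtin, same result
#else
  return 9;  // hardcoded; the static_assert below keeps it honest
#endif
}

// Whichever branch was taken must agree with the actual card size.
static_assert((1u << GranularityBits()) == kCardSizeInBytes);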
@@ -23,13 +23,6 @@ namespace internal {
 // static
 AtomicEntryFlag WriteBarrier::write_barrier_enabled_;
-#if defined(CPPGC_YOUNG_GENERATION)
-// static
-size_t YoungGenerationEnabler::is_enabled_;
-// static
-v8::base::LeakyObject<v8::base::Mutex> YoungGenerationEnabler::mutex_;
-#endif  // defined(CPPGC_YOUNG_GENERATION)
 namespace {
 template <MarkerBase::WriteBarrierType type>
@@ -207,9 +200,17 @@ bool WriteBarrierTypeForCagedHeapPolicy::IsMarking(
 #endif  // CPPGC_CAGED_HEAP
 #if defined(CPPGC_YOUNG_GENERATION)
+// static
+YoungGenerationEnabler& YoungGenerationEnabler::Instance() {
+  static v8::base::LeakyObject<YoungGenerationEnabler> instance;
+  return *instance.get();
+}
 void YoungGenerationEnabler::Enable() {
-  v8::base::LockGuard _(mutex_.get());
-  if (++is_enabled_ == 1) {
+  auto& instance = Instance();
+  v8::base::LockGuard _(&instance.mutex_);
+  if (++instance.is_enabled_ == 1) {
     // Enter the flag so that the check in the write barrier will always trigger
     // when young generation is enabled.
     WriteBarrier::FlagUpdater::Enter();
@@ -217,17 +218,20 @@ void YoungGenerationEnabler::Enable() {
 }
 void YoungGenerationEnabler::Disable() {
-  v8::base::LockGuard _(mutex_.get());
-  DCHECK_LT(0, is_enabled_);
-  if (--is_enabled_ == 0) {
+  auto& instance = Instance();
+  v8::base::LockGuard _(&instance.mutex_);
+  DCHECK_LT(0, instance.is_enabled_);
+  if (--instance.is_enabled_ == 0) {
     WriteBarrier::FlagUpdater::Exit();
   }
 }
 bool YoungGenerationEnabler::IsEnabled() {
-  v8::base::LockGuard _(mutex_.get());
-  return is_enabled_;
+  auto& instance = Instance();
+  v8::base::LockGuard _(&instance.mutex_);
+  return instance.is_enabled_;
 }
 #endif  // defined(CPPGC_YOUNG_GENERATION)
 }  // namespace internal
...
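
Enable() and Disable() above form a mutex-guarded reference count: only the first Enable() enters the write-barrier flag, and only the matching last Disable() exits it, so nested or concurrent users compose. A hedged standalone sketch of that protocol (names are illustrative; a bool models WriteBarrier::FlagUpdater):

#include <cassert>
#include <cstddef>
#include <mutex>

class RefCountedToggle {
 public:
  void Enable() {
    std::lock_guard<std::mutex> guard(mutex_);
    if (++count_ == 1) entered_ = true;  // first enabler flips the flag
  }
  void Disable() {
    std::lock_guard<std::mutex> guard(mutex_);
    assert(count_ > 0);                   // mirrors DCHECK_LT(0, is_enabled_)
    if (--count_ == 0) entered_ = false;  // last disabler flips it back
  }
  bool IsEnabled() {
    std::lock_guard<std::mutex> guard(mutex_);
    return count_ > 0;
  }

 private:
  std::mutex mutex_;
  std::size_t count_ = 0;
  bool entered_ = false;
};

int main() {
  RefCountedToggle t;
  t.Enable();   // count 1: flag entered
  t.Enable();   // count 2: no flag change
  t.Disable();  // count 1: still entered
  assert(t.IsEnabled());
  t.Disable();  // count 0: flag exited
  assert(!t.IsEnabled());
}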
@@ -30,10 +30,15 @@ class V8_EXPORT_PRIVATE YoungGenerationEnabler final {
   static bool IsEnabled();
  private:
-  YoungGenerationEnabler() = delete;
-  static size_t is_enabled_;
-  static v8::base::LeakyObject<v8::base::Mutex> mutex_;
+  template <typename T>
+  friend class v8::base::LeakyObject;
+
+  static YoungGenerationEnabler& Instance();
+
+  YoungGenerationEnabler() = default;
+
+  size_t is_enabled_;
+  v8::base::Mutex mutex_;
 };
 #endif  // defined(CPPGC_YOUNG_GENERATION)
...
@@ -158,13 +158,14 @@ TEST_F(ConcurrentSweeperTest, BackgroundSweepOfNormalPage) {
   // Wait for concurrent sweeping to finish.
   WaitForConcurrentSweeping();
-#if !defined(CPPGC_YOUNG_GENERATION)
-  // Check that the marked object was unmarked.
-  EXPECT_FALSE(HeapObjectHeader::FromObject(marked_object).IsMarked());
-#else
-  // Check that the marked object is still marked.
-  EXPECT_TRUE(HeapObjectHeader::FromObject(marked_object).IsMarked());
-#endif
+  const auto& hoh = HeapObjectHeader::FromObject(marked_object);
+  if (Heap::From(GetHeap())->generational_gc_supported()) {
+    // Check that the marked object is still marked.
+    EXPECT_TRUE(hoh.IsMarked());
+  } else {
+    // Check that the marked object was unmarked.
+    EXPECT_FALSE(hoh.IsMarked());
+  }
   // Check that free list entries are created right away for non-finalizable
   // objects, but not immediately returned to the space's freelist.
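
Throughout the tests, compile-time #if CPPGC_YOUNG_GENERATION branches become runtime checks of generational_gc_supported(), so a single binary covers both expectations: with generational GC the sweeper keeps mark bits (marked objects stay "old"), without it sweeping unmarks them. A minimal gtest-style sketch of the pattern, with an assumed stand-in predicate:

#include <gtest/gtest.h>

// Stand-in for Heap::From(GetHeap())->generational_gc_supported().
static bool generational_gc_supported() { return false; }

// Models the behavior under test: generational sweeping preserves mark
// bits, non-generational sweeping clears them.
static bool MarkBitAfterSweep(bool was_marked) {
  return was_marked && generational_gc_supported();
}

TEST(ConcurrentSweeperExample, MarkBitAfterSweep) {
  const bool marked = MarkBitAfterSweep(/*was_marked=*/true);
  if (generational_gc_supported()) {
    EXPECT_TRUE(marked);   // still marked under generational GC
  } else {
    EXPECT_FALSE(marked);  // unmarked, ready for the next major GC cycle
  }
}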
@@ -198,13 +199,14 @@ TEST_F(ConcurrentSweeperTest, BackgroundSweepOfLargePage) {
   // Wait for concurrent sweeping to finish.
   WaitForConcurrentSweeping();
-#if !defined(CPPGC_YOUNG_GENERATION)
-  // Check that the marked object was unmarked.
-  EXPECT_FALSE(HeapObjectHeader::FromObject(marked_object).IsMarked());
-#else
-  // Check that the marked object is still marked.
-  EXPECT_TRUE(HeapObjectHeader::FromObject(marked_object).IsMarked());
-#endif
+  const auto& hoh = HeapObjectHeader::FromObject(marked_object);
+  if (Heap::From(GetHeap())->generational_gc_supported()) {
+    // Check that the marked object is still marked.
+    EXPECT_TRUE(hoh.IsMarked());
+  } else {
+    // Check that the marked object was unmarked.
+    EXPECT_FALSE(hoh.IsMarked());
+  }
   // The page should not have been removed on the background threads.
   EXPECT_TRUE(PageInBackend(unmarked_page));
@@ -341,13 +343,14 @@ TEST_F(ConcurrentSweeperTest, IncrementalSweeping) {
   GetPlatform().RunAllForegroundTasks();
   EXPECT_EQ(2u, g_destructor_callcount);
-#if !defined(CPPGC_YOUNG_GENERATION)
-  EXPECT_FALSE(marked_normal_header.IsMarked());
-  EXPECT_FALSE(marked_large_header.IsMarked());
-#else
-  EXPECT_TRUE(marked_normal_header.IsMarked());
-  EXPECT_TRUE(marked_large_header.IsMarked());
-#endif
+  if (Heap::From(GetHeap())->generational_gc_supported()) {
+    EXPECT_TRUE(marked_normal_header.IsMarked());
+    EXPECT_TRUE(marked_large_header.IsMarked());
+  } else {
+    EXPECT_FALSE(marked_normal_header.IsMarked());
+    EXPECT_FALSE(marked_large_header.IsMarked());
+  }
   FinishSweeping();
 }
...
@@ -47,7 +47,6 @@ class DynamicallySized final : public GarbageCollected<DynamicallySized> {
 }  // namespace
 TEST_F(ExplicitManagementTest, FreeRegularObjectToLAB) {
-#if !defined(CPPGC_YOUNG_GENERATION)
   auto* o =
       MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
   const auto& space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
@@ -65,11 +64,9 @@ TEST_F(ExplicitManagementTest, FreeRegularObjectToLAB) {
   // LAB is included in allocated object size, so no change is expected.
   EXPECT_EQ(allocated_size_before, AllocatedObjectSize());
   EXPECT_FALSE(space.free_list().ContainsForTesting({needle, size}));
-#endif  // !defined(CPPGC_YOUNG_GENERATION)
 }
 TEST_F(ExplicitManagementTest, FreeRegularObjectToFreeList) {
-#if !defined(CPPGC_YOUNG_GENERATION)
   auto* o =
       MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
   const auto& space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
@@ -85,11 +82,9 @@ TEST_F(ExplicitManagementTest, FreeRegularObjectToFreeList) {
   EXPECT_EQ(lab.start(), nullptr);
   EXPECT_EQ(allocated_size_before - size, AllocatedObjectSize());
   EXPECT_TRUE(space.free_list().ContainsForTesting({needle, size}));
-#endif  // !defined(CPPGC_YOUNG_GENERATION)
 }
 TEST_F(ExplicitManagementTest, FreeLargeObject) {
-#if !defined(CPPGC_YOUNG_GENERATION)
   auto* o = MakeGarbageCollected<DynamicallySized>(
       GetHeap()->GetAllocationHandle(),
       AdditionalBytes(kLargeObjectSizeThreshold));
@@ -103,11 +98,9 @@ TEST_F(ExplicitManagementTest, FreeLargeObject) {
   subtle::FreeUnreferencedObject(GetHeapHandle(), *o);
   EXPECT_FALSE(heap.page_backend()->Lookup(needle));
   EXPECT_EQ(allocated_size_before - size, AllocatedObjectSize());
-#endif  // !defined(CPPGC_YOUNG_GENERATION)
 }
 TEST_F(ExplicitManagementTest, FreeBailsOutDuringGC) {
-#if !defined(CPPGC_YOUNG_GENERATION)
   const size_t snapshot_before = AllocatedObjectSize();
   auto* o =
       MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
@@ -120,7 +113,6 @@ TEST_F(ExplicitManagementTest, FreeBailsOutDuringGC) {
   ResetLinearAllocationBuffers();
   subtle::FreeUnreferencedObject(GetHeapHandle(), *o);
   EXPECT_EQ(snapshot_before, AllocatedObjectSize());
-#endif  // !defined(CPPGC_YOUNG_GENERATION)
 }
 TEST_F(ExplicitManagementTest, GrowAtLAB) {
...
@@ -261,13 +261,13 @@ TEST_F(SweeperTest, UnmarkObjects) {
   Sweep();
-#if !defined(CPPGC_YOUNG_GENERATION)
-  EXPECT_FALSE(normal_object_header.IsMarked());
-  EXPECT_FALSE(large_object_header.IsMarked());
-#else
-  EXPECT_TRUE(normal_object_header.IsMarked());
-  EXPECT_TRUE(large_object_header.IsMarked());
-#endif
+  if (Heap::From(GetHeap())->generational_gc_supported()) {
+    EXPECT_TRUE(normal_object_header.IsMarked());
+    EXPECT_TRUE(large_object_header.IsMarked());
+  } else {
+    EXPECT_FALSE(normal_object_header.IsMarked());
+    EXPECT_FALSE(large_object_header.IsMarked());
+  }
 }
 TEST_F(SweeperTest, LazySweepingDuringAllocation) {
...
@@ -351,11 +351,10 @@ TEST_F(NoWriteBarrierTest, WriteBarrierBailoutWhenMarkingIsOff) {
   {
     EXPECT_FALSE(object1->IsMarked());
    WriteBarrierParams params;
-#if defined(CPPGC_YOUNG_GENERATION)
-    WriteBarrierType expected = WriteBarrierType::kGenerational;
-#else   // !CPPGC_YOUNG_GENERATION
-    WriteBarrierType expected = WriteBarrierType::kNone;
-#endif  // !CPPGC_YOUNG_GENERATION
+    const WriteBarrierType expected =
+        Heap::From(GetHeap())->generational_gc_supported()
+            ? WriteBarrierType::kGenerational
+            : WriteBarrierType::kNone;
     EXPECT_EQ(expected, HeapConsistency::GetWriteBarrierType(
                             object2->next_ref().GetSlotForTesting(),
                             object2->next_ref().Get(), params));
...