Commit 8370387f authored by Anton Bikineev, committed by V8 LUCI CQ

cppgc: young-gen: Prepare infra for young generation

This CL does the following:
1) Makes sure young generation works and tests pass;
2) Provides CollectGarbageInYoungGenerationForTesting(), which is needed
   to support the remaining tests in Blink (see the usage sketch after
   this list);
3) Moves the cppgc_enable_young_generation GN flag to v8.gni so that it
   can be referred to from Blink;
4) Bails out of marking TracedReferences in UnifiedHeapMarkingState;
5) Disables (temporarily) prompt freeing for young generation;
6) Fixes remembered-set visitation for nullptr|kSentinel slots.
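
A minimal usage sketch for (2). Only
CollectGarbageInYoungGenerationForTesting() is added by this CL;
cppgc::EmbedderStackState is pre-existing public API, and the isolate
setup is assumed embedder boilerplate:

  #include "include/v8-cppgc.h"
  #include "include/v8-isolate.h"

  // Drives a stop-the-world minor GC from a test, without conservative
  // stack scanning. Assumes a CppHeap is already attached to the isolate.
  void RunMinorGCForTest(v8::Isolate* isolate) {
    v8::CppHeap* cpp_heap = isolate->GetCppHeap();
    cpp_heap->CollectGarbageInYoungGenerationForTesting(
        cppgc::EmbedderStackState::kNoHeapPointers);
  }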

Bug: chromium:1029379
Change-Id: I5165fa22c8a0eaa708ef7a35a9978cb12e1cb13e
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3429202
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Auto-Submit: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Commit-Queue: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78901}
parent b8b32f6b
@@ -295,9 +295,6 @@ declare_args() {
# Enable allocations during prefinalizer invocations.
cppgc_allow_allocations_in_prefinalizers = false
# Enable young generation in cppgc.
cppgc_enable_young_generation = false
# Enable V8 zone compression experimental feature.
# Sets -DV8_COMPRESS_ZONES.
v8_enable_zone_compression = ""
@@ -5,8 +5,8 @@
import("//build/config/gclient_args.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//build/config/v8_target_cpu.gni")
import("split_static_library.gni")
import("release_branch_toggle.gni")
import("split_static_library.gni")
declare_args() {
# Set flags for tracking code coverage. Uses gcov with gcc and sanitizer
@@ -86,6 +86,9 @@ declare_args() {
cppgc_is_standalone = false
# Enable young generation in cppgc.
cppgc_enable_young_generation = false
# Enable advanced BigInt algorithms, costing about 10-30 KB binary size
# depending on platform. Disabled on Android to save binary size.
v8_advanced_bigint_algorithms = !is_android
@@ -144,6 +144,14 @@ class V8_EXPORT CppHeap {
*/
void CollectGarbageForTesting(cppgc::EmbedderStackState stack_state);
/**
* Performs a stop-the-world minor garbage collection for testing purposes.
*
* \param stack_state The stack state to assume for the garbage collection.
*/
void CollectGarbageInYoungGenerationForTesting(
cppgc::EmbedderStackState stack_state);
private:
CppHeap() = default;
@@ -89,7 +89,16 @@ void CppHeap::EnableDetachedGarbageCollectionsForTesting() {
}
void CppHeap::CollectGarbageForTesting(cppgc::EmbedderStackState stack_state) {
return internal::CppHeap::From(this)->CollectGarbageForTesting(stack_state);
return internal::CppHeap::From(this)->CollectGarbageForTesting(
cppgc::internal::GarbageCollector::Config::CollectionType::kMajor,
stack_state);
}
void CppHeap::CollectGarbageInYoungGenerationForTesting(
cppgc::EmbedderStackState stack_state) {
return internal::CppHeap::From(this)->CollectGarbageForTesting(
cppgc::internal::GarbageCollector::Config::CollectionType::kMinor,
stack_state);
}
namespace internal {
@@ -187,11 +196,11 @@ class UnifiedHeapMarker final : public cppgc::internal::MarkerBase {
}
cppgc::internal::MarkingStateBase& GetMutatorMarkingState() {
return marking_visitor_.marking_state_;
return marking_visitor_->marking_state_;
}
protected:
cppgc::Visitor& visitor() final { return marking_visitor_; }
cppgc::Visitor& visitor() final { return *marking_visitor_; }
cppgc::internal::ConservativeTracingVisitor& conservative_visitor() final {
return conservative_marking_visitor_;
}
@@ -201,7 +210,7 @@ class UnifiedHeapMarker final : public cppgc::internal::MarkerBase {
private:
UnifiedHeapMarkingState unified_heap_marking_state_;
MutatorUnifiedHeapMarkingVisitor marking_visitor_;
std::unique_ptr<MutatorUnifiedHeapMarkingVisitor> marking_visitor_;
cppgc::internal::ConservativeMarkingVisitor conservative_marking_visitor_;
};
@@ -211,10 +220,15 @@ UnifiedHeapMarker::UnifiedHeapMarker(Heap* v8_heap,
MarkingConfig config)
: cppgc::internal::MarkerBase(heap, platform, config),
unified_heap_marking_state_(v8_heap),
marking_visitor_(heap, mutator_marking_state_,
unified_heap_marking_state_),
marking_visitor_(
config.collection_type == cppgc::internal::GarbageCollector::Config::
CollectionType::kMajor
? std::make_unique<MutatorUnifiedHeapMarkingVisitor>(
heap, mutator_marking_state_, unified_heap_marking_state_)
: std::make_unique<MutatorMinorGCMarkingVisitor>(
heap, mutator_marking_state_, unified_heap_marking_state_)),
conservative_marking_visitor_(heap, mutator_marking_state_,
marking_visitor_) {
*marking_visitor_) {
concurrent_marker_ = std::make_unique<UnifiedHeapConcurrentMarker>(
heap_, marking_worklists_, schedule_, platform_,
unified_heap_marking_state_);
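
The marking visitor is now chosen at construction time, so the by-value
member becomes a std::unique_ptr to the base visitor type. A stripped-down
sketch of that pattern with stand-in names (Marker, MajorVisitor and
MinorVisitor are illustrative, not the CL's types):

  #include <memory>

  struct MajorVisitor {  // stands in for MutatorUnifiedHeapMarkingVisitor
    virtual ~MajorVisitor() = default;
  };
  struct MinorVisitor : MajorVisitor {};  // stands in for MutatorMinorGCMarkingVisitor

  enum class CollectionType { kMajor, kMinor };

  struct Marker {
    explicit Marker(CollectionType type)
        : visitor_(type == CollectionType::kMajor
                       ? std::make_unique<MajorVisitor>()
                       : std::make_unique<MinorVisitor>()) {}
    // Held by pointer because the concrete type now differs per GC type.
    std::unique_ptr<MajorVisitor> visitor_;
  };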
@@ -422,7 +436,9 @@ bool ShouldReduceMemory(CppHeap::GarbageCollectionFlags flags) {
} // namespace
void CppHeap::InitializeTracing(GarbageCollectionFlags gc_flags) {
void CppHeap::InitializeTracing(
cppgc::internal::GarbageCollector::Config::CollectionType collection_type,
GarbageCollectionFlags gc_flags) {
CHECK(!sweeper_.IsSweepingInProgress());
// Check that previous cycle metrics have been reported.
@@ -436,8 +452,7 @@ void CppHeap::InitializeTracing(GarbageCollectionFlags gc_flags) {
current_gc_flags_ = gc_flags;
const UnifiedHeapMarker::MarkingConfig marking_config{
UnifiedHeapMarker::MarkingConfig::CollectionType::kMajor,
cppgc::Heap::StackState::kNoHeapPointers,
collection_type, cppgc::Heap::StackState::kNoHeapPointers,
(IsForceGC(current_gc_flags_) && !force_incremental_marking_for_testing_)
? UnifiedHeapMarker::MarkingConfig::MarkingType::kAtomic
: UnifiedHeapMarker::MarkingConfig::MarkingType::
@@ -590,6 +605,7 @@ void CppHeap::ReportBufferedAllocationSizeIfPossible() {
}
void CppHeap::CollectGarbageForTesting(
cppgc::internal::GarbageCollector::Config::CollectionType collection_type,
cppgc::internal::GarbageCollector::Config::StackState stack_state) {
if (in_no_gc_scope()) return;
@@ -606,7 +622,7 @@ void CppHeap::CollectGarbageForTesting(
// Perform an atomic GC, with starting incremental/concurrent marking and
// immediately finalizing the garbage collection.
if (!IsMarking()) {
InitializeTracing(GarbageCollectionFlagValues::kForced);
InitializeTracing(collection_type, GarbageCollectionFlagValues::kForced);
StartTracing();
}
EnterFinalPause(stack_state);
@@ -629,7 +645,9 @@ void CppHeap::StartIncrementalGarbageCollectionForTesting() {
DCHECK_NULL(isolate_);
if (IsMarking()) return;
force_incremental_marking_for_testing_ = true;
InitializeTracing(GarbageCollectionFlagValues::kForced);
InitializeTracing(
cppgc::internal::GarbageCollector::Config::CollectionType::kMajor,
GarbageCollectionFlagValues::kForced);
StartTracing();
force_incremental_marking_for_testing_ = false;
}
@@ -640,7 +658,9 @@ void CppHeap::FinalizeIncrementalGarbageCollectionForTesting(
DCHECK_NULL(isolate_);
DCHECK(IsMarking());
if (IsMarking()) {
CollectGarbageForTesting(stack_state);
CollectGarbageForTesting(
cppgc::internal::GarbageCollector::Config::CollectionType::kMajor,
stack_state);
}
sweeper_.FinishIfRunning();
}
@@ -109,6 +109,7 @@ class V8_EXPORT_PRIVATE CppHeap final
void EnableDetachedGarbageCollectionsForTesting();
void CollectGarbageForTesting(
cppgc::internal::GarbageCollector::Config::CollectionType,
cppgc::internal::GarbageCollector::Config::StackState);
void CollectCustomSpaceStatisticsAtLastGC(
@@ -117,7 +118,9 @@ class V8_EXPORT_PRIVATE CppHeap final
void FinishSweepingIfRunning();
void InitializeTracing(GarbageCollectionFlags);
void InitializeTracing(
cppgc::internal::GarbageCollector::Config::CollectionType,
GarbageCollectionFlags);
void StartTracing();
bool AdvanceTracing(double max_duration);
bool IsTracingDone();
@@ -51,7 +51,7 @@ class V8_EXPORT_PRIVATE UnifiedHeapMarkingVisitorBase : public JSVisitor {
void HandleMovableReference(const void**) final;
// JS handling.
void Visit(const TracedReferenceBase& ref) final;
void Visit(const TracedReferenceBase& ref) override;
cppgc::internal::BasicMarkingState& marking_state_;
UnifiedHeapMarkingState& unified_heap_marking_state_;
@@ -59,7 +59,7 @@ class V8_EXPORT_PRIVATE UnifiedHeapMarkingVisitorBase : public JSVisitor {
friend class UnifiedHeapMarker;
};
class V8_EXPORT_PRIVATE MutatorUnifiedHeapMarkingVisitor final
class V8_EXPORT_PRIVATE MutatorUnifiedHeapMarkingVisitor
: public UnifiedHeapMarkingVisitorBase {
public:
MutatorUnifiedHeapMarkingVisitor(HeapBase&, MutatorMarkingState&,
@@ -72,6 +72,18 @@ class V8_EXPORT_PRIVATE MutatorUnifiedHeapMarkingVisitor final
const SourceLocation&) final;
};
class V8_EXPORT_PRIVATE MutatorMinorGCMarkingVisitor final
: public MutatorUnifiedHeapMarkingVisitor {
public:
using MutatorUnifiedHeapMarkingVisitor::MutatorUnifiedHeapMarkingVisitor;
~MutatorMinorGCMarkingVisitor() override = default;
protected:
// Override and make the function empty, since we don't want to trace V8
// references during cppgc's minor GC.
void Visit(const TracedReferenceBase&) final {}
};
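
What the empty override buys, seen from a user class: during a cppgc-only
minor GC the V8 heap is not collected, so edges into it need not be
traced. In the hedged sketch below (Node is illustrative; tracing a
TracedReference through cppgc::Visitor is assumed to go through the
TraceTrait specialization in v8-cppgc.h), the Member edge is still
followed while the TracedReference edge hits the no-op:

  #include "cppgc/garbage-collected.h"
  #include "cppgc/member.h"
  #include "cppgc/visitor.h"
  #include "v8-cppgc.h"
  #include "v8-traced-handle.h"
  #include "v8-value.h"

  class Node final : public cppgc::GarbageCollected<Node> {
   public:
    void Trace(cppgc::Visitor* visitor) const {
      visitor->Trace(next_);      // cppgc edge: traced in minor and major GCs
      visitor->Trace(v8_value_);  // V8 edge: skipped by the minor-GC visitor
    }

   private:
    cppgc::Member<Node> next_;
    v8::TracedReference<v8::Value> v8_value_;
  };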
class V8_EXPORT_PRIVATE ConcurrentUnifiedHeapMarkingVisitor final
: public UnifiedHeapMarkingVisitorBase {
public:
@@ -27,6 +27,9 @@ bool InGC(HeapHandle& heap_handle) {
} // namespace
void FreeUnreferencedObject(HeapHandle& heap_handle, void* object) {
// TODO(bikineev): Invalidate slots that reside within |object| and handle
// freed values in VisitRememberedSlots.
#if !defined(CPPGC_YOUNG_GENERATION)
if (InGC(heap_handle)) {
return;
}
@@ -59,6 +62,7 @@ void FreeUnreferencedObject(HeapHandle& heap_handle, void* object) {
// list entry.
}
}
#endif // !defined(CPPGC_YOUNG_GENERATION)
}
namespace {
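
The #if above compiles prompt freeing out because of the TODO'd hazard:
the generational barrier may already have recorded slots inside the object
being freed, and VisitRememberedSlots would later touch freed memory. A
hypothetical sketch of the problematic sequence (Old, Young and Hazard are
illustrative only; FreeUnreferencedObject is the real public API):

  #include "cppgc/explicit-management.h"
  #include "cppgc/garbage-collected.h"
  #include "cppgc/member.h"
  #include "cppgc/visitor.h"

  class Young final : public cppgc::GarbageCollected<Young> {
   public:
    void Trace(cppgc::Visitor*) const {}
  };

  class Old final : public cppgc::GarbageCollected<Old> {
   public:
    void Trace(cppgc::Visitor* v) const { v->Trace(field_); }
    cppgc::Member<Young> field_;  // writes here go through the barrier
  };

  void Hazard(cppgc::HeapHandle& heap, Old* old_obj, Young* young_obj) {
    old_obj->field_ = young_obj;  // slot recorded in the remembered set
    cppgc::subtle::FreeUnreferencedObject(heap, *old_obj);
    // The remembered slot now points into freed memory; the next minor GC
    // would dereference it, which is why the fast path is disabled until
    // such slots are invalidated.
  }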
@@ -230,7 +230,12 @@ size_t HeapObjectHeader::AllocatedSize() const {
}
void HeapObjectHeader::SetAllocatedSize(size_t size) {
#if !defined(CPPGC_YOUNG_GENERATION)
// With sticky bits, marked objects correspond to old objects.
// TODO(bikineev:1029379): Consider disallowing old/marked objects to be
// resized.
DCHECK(!IsMarked());
#endif
encoded_low_ = EncodeSize(size);
}
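
The guarded DCHECK relies on the sticky-bit reading of the mark bit: with
young generation enabled, mark bits are not cleared by a minor GC, so a
marked object is an old object and may legitimately be resized. A toy
model of that invariant (illustrative; the real HeapObjectHeader packs
this into encoded bitfields):

  struct ToyHeader {
    bool marked = false;  // sticky: survives minor GCs instead of resetting

    // Under CPPGC_YOUNG_GENERATION, marked <=> old.
    bool IsOld() const { return marked; }
    bool IsYoung() const { return !marked; }
  };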
@@ -77,6 +77,9 @@ void VisitRememberedSlots(HeapBase& heap,
DCHECK(!slot_header.template IsInConstruction<AccessMode::kNonAtomic>());
void* value = *reinterpret_cast<void**>(slot);
// Slot could be updated to nullptr or kSentinelPointer by the mutator.
if (value == kSentinelPointer || value == nullptr) continue;
mutator_marking_state.DynamicallyMarkAddress(static_cast<Address>(value));
}
#endif
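
This is fix (6) from the description: a slot recorded by the generational
barrier can be overwritten with nullptr or kSentinelPointer before the
next minor GC, so visitation must skip such values rather than treat them
as object addresses. A condensed, self-contained sketch of the guarded
loop (the slot container and mark callback stand in for the CL's
internals):

  #include <cstdint>
  #include <set>

  #include "cppgc/sentinel-pointer.h"  // cppgc::kSentinelPointer

  // MarkFn stands in for MutatorMarkingState::DynamicallyMarkAddress.
  template <typename MarkFn>
  void VisitRememberedSlotsSketch(const std::set<void*>& slots, MarkFn mark) {
    for (void* slot : slots) {
      void* value = *reinterpret_cast<void**>(slot);
      // The mutator may have reset the slot after the barrier recorded it.
      if (value == cppgc::kSentinelPointer || value == nullptr) continue;
      mark(static_cast<uint8_t*>(value));
    }
  }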
@@ -129,6 +129,7 @@ void WriteBarrier::GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
const AgeTable& age_table,
const void* slot,
uintptr_t value_offset) {
DCHECK(slot);
// A write during atomic pause (e.g. pre-finalizer) may trigger the slow path
// of the barrier. This is a result of the order of bailouts where not marking
// results in applying the generational barrier.
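
For context on the new DCHECK: only the slow path of the generational
barrier records slots, and by that point the slot must be a real address;
the fast path filters writes through the age table. A schematic version
under assumed types (AgeTableSketch, RecordRememberedSlot and the
per-offset granularity are illustrative; the real barrier consults
CagedHeapLocalData):

  #include <cassert>
  #include <cstdint>

  enum class Age : uint8_t { kOld, kYoung };

  struct AgeTableSketch {
    Age AgeOf(uintptr_t offset) const;  // assumed per-offset lookup
  };

  void RecordRememberedSlot(void** slot);  // assumed remembered-set insert

  void GenerationalBarrierSketch(const AgeTableSketch& age_table, void** slot,
                                 uintptr_t slot_offset,
                                 uintptr_t value_offset) {
    assert(slot != nullptr);  // mirrors the DCHECK added here
    // Only old-to-young pointers need remembering.
    if (age_table.AgeOf(slot_offset) != Age::kOld ||
        age_table.AgeOf(value_offset) != Age::kYoung) {
      return;
    }
    RecordRememberedSlot(slot);
  }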
@@ -43,7 +43,10 @@ CppHeap::GarbageCollectionFlags ConvertTraceFlags(
void LocalEmbedderHeapTracer::PrepareForTrace(
EmbedderHeapTracer::TraceFlags flags) {
if (cpp_heap_) cpp_heap()->InitializeTracing(ConvertTraceFlags(flags));
if (cpp_heap_)
cpp_heap()->InitializeTracing(
cppgc::internal::GarbageCollector::Config::CollectionType::kMajor,
ConvertTraceFlags(flags));
}
void LocalEmbedderHeapTracer::TracePrologue(
@@ -47,6 +47,7 @@ class DynamicallySized final : public GarbageCollected<DynamicallySized> {
} // namespace
TEST_F(ExplicitManagementTest, FreeRegularObjectToLAB) {
#if !defined(CPPGC_YOUNG_GENERATION)
auto* o =
MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
const auto& space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
@@ -64,9 +65,11 @@ TEST_F(ExplicitManagementTest, FreeRegularObjectToLAB) {
// LAB is included in allocated object size, so no change is expected.
EXPECT_EQ(allocated_size_before, AllocatedObjectSize());
EXPECT_FALSE(space.free_list().ContainsForTesting({needle, size}));
#endif  // !defined(CPPGC_YOUNG_GENERATION)
}
TEST_F(ExplicitManagementTest, FreeRegularObjectToFreeList) {
#if !defined(CPPGC_YOUNG_GENERATION)
auto* o =
MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
const auto& space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
@@ -82,9 +85,11 @@ TEST_F(ExplicitManagementTest, FreeRegularObjectToFreeList) {
EXPECT_EQ(lab.start(), nullptr);
EXPECT_EQ(allocated_size_before - size, AllocatedObjectSize());
EXPECT_TRUE(space.free_list().ContainsForTesting({needle, size}));
#endif  // !defined(CPPGC_YOUNG_GENERATION)
}
TEST_F(ExplicitManagementTest, FreeLargeObject) {
#if !defined(CPPGC_YOUNG_GENERATION)
auto* o = MakeGarbageCollected<DynamicallySized>(
GetHeap()->GetAllocationHandle(),
AdditionalBytes(kLargeObjectSizeThreshold));
@@ -98,9 +103,11 @@ TEST_F(ExplicitManagementTest, FreeLargeObject) {
subtle::FreeUnreferencedObject(GetHeapHandle(), *o);
EXPECT_FALSE(heap.page_backend()->Lookup(needle));
EXPECT_EQ(allocated_size_before - size, AllocatedObjectSize());
#endif  // !defined(CPPGC_YOUNG_GENERATION)
}
TEST_F(ExplicitManagementTest, FreeBailsOutDuringGC) {
#if !defined(CPPGC_YOUNG_GENERATION)
const size_t snapshot_before = AllocatedObjectSize();
auto* o =
MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
@@ -113,6 +120,7 @@ TEST_F(ExplicitManagementTest, FreeBailsOutDuringGC) {
ResetLinearAllocationBuffers();
subtle::FreeUnreferencedObject(GetHeapHandle(), *o);
EXPECT_EQ(snapshot_before, AllocatedObjectSize());
#endif  // !defined(CPPGC_YOUNG_GENERATION)
}
TEST_F(ExplicitManagementTest, GrowAtLAB) {