Commit 3f5c2dda authored by Anton Bikineev, committed by V8 LUCI CQ

cppgc: young-gen: Implement GenerationalBarrier for source objects

The generational barrier for source objects records the entire source
object to be processed later during remembered set visitation. It's
planned to be used for Blink backing stores when an inlined object (or a
range thereof) is added (HeapAllocator::NotifyNewObject(s)).

An alternative approach would be to eagerly process the inlined objects
using a custom callback. However, this requires changing Visitors to
bring slots into the context. That eager approach should work better in
scenarios where small ranges or single elements are added, since it
avoids retracing potentially large backing stores. A follow-up CL
implements this idea.
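
For illustration, a minimal sketch of how an embedder might issue the new
barrier after constructing elements in place, modeled on the unit test added
in this CL (NotifyNewElements, backing_store, and the heap-handle lambda are
hypothetical names, not part of the API; a real caller would also handle the
kMarking case):

  #include "cppgc/heap-consistency.h"

  using HC = cppgc::subtle::HeapConsistency;

  // Hypothetical embedder hook, called after new elements were constructed
  // in place inside an existing backing store allocation.
  void NotifyNewElements(void* backing_store, cppgc::HeapHandle& heap_handle) {
    HC::WriteBarrierParams params;
    const HC::WriteBarrierType type = HC::GetWriteBarrierType(
        backing_store, params,
        [&heap_handle]() -> cppgc::HeapHandle& { return heap_handle; });
    if (type == HC::WriteBarrierType::kGenerational) {
      // Records the enclosing source object; it is retraced during the next
      // minor GC (see VisitRememberedObjects below).
      HC::GenerationalBarrierForSourceObject(params, backing_store);
    }
  }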

Bug: chromium:1029379
Change-Id: Iacb59e4b10a66354526ed293d7f43f14d8761a8f
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3460402
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79073}
parent 620158f8
@@ -149,6 +149,19 @@ class HeapConsistency final {
internal::WriteBarrier::GenerationalBarrier(params, slot);
}
/**
* Generational barrier for source object that may contain outgoing pointers
* to objects in young generation.
*
* \param params The parameters retrieved from `GetWriteBarrierType()`.
* \param inner_pointer Pointer to the source object.
*/
static V8_INLINE void GenerationalBarrierForSourceObject(
const WriteBarrierParams& params, const void* inner_pointer) {
internal::WriteBarrier::GenerationalBarrierForSourceObject(params,
inner_pointer);
}
private:
HeapConsistency() = delete;
};
@@ -80,9 +80,13 @@ class V8_EXPORT WriteBarrier final {
#if defined(CPPGC_YOUNG_GENERATION)
static V8_INLINE void GenerationalBarrier(const Params& params,
const void* slot);
  static V8_INLINE void GenerationalBarrierForSourceObject(
      const Params& params, const void* inner_pointer);
#else  // !CPPGC_YOUNG_GENERATION
static V8_INLINE void GenerationalBarrier(const Params& params,
const void* slot) {}
static V8_INLINE void GenerationalBarrierForSourceObject(
const Params& params, const void* inner_pointer) {}
#endif // CPPGC_YOUNG_GENERATION
#if V8_ENABLE_CHECKS
@@ -120,8 +124,10 @@ class V8_EXPORT WriteBarrier final {
#if defined(CPPGC_YOUNG_GENERATION)
static CagedHeapLocalData& GetLocalData(HeapHandle&);
static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
const AgeTable& age_table,
const void* slot, uintptr_t value_offset);
static void GenerationalBarrierForSourceObjectSlow(
const CagedHeapLocalData& local_data, const void* object);
#endif // CPPGC_YOUNG_GENERATION
static AtomicEntryFlag incremental_or_concurrent_marking_flag_;
@@ -404,6 +410,21 @@ void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset);
}
// static
void WriteBarrier::GenerationalBarrierForSourceObject(
const Params& params, const void* inner_pointer) {
CheckParams(Type::kGenerational, params);
const CagedHeapLocalData& local_data = params.caged_heap();
const AgeTable& age_table = local_data.age_table;
// Assume that if the first element is in young generation, the whole range is
// in young generation.
if (V8_LIKELY(age_table[params.slot_offset] == AgeTable::Age::kYoung)) return;
GenerationalBarrierForSourceObjectSlow(local_data, inner_pointer);
}
#endif // !CPPGC_YOUNG_GENERATION
} // namespace internal
@@ -25,9 +25,10 @@ bool InGC(HeapHandle& heap_handle) {
heap.sweeper().IsSweepingInProgress();
}
#if defined(CPPGC_YOUNG_GENERATION)
void InvalidateRememberedSlotsInRange(HeapBase& heap, void* begin, void* end) {
// Invalidate slots from |remembered_slots| that reside within |begin| and
// |end|.
auto& remembered_slots = heap.remembered_slots();
// TODO(bikineev): The 2 binary walks can be optimized with a custom
// algorithm.
@@ -42,8 +43,8 @@ void InvalidateRememberedSlotsInRange(HeapBase& heap, void* begin, void* end) {
return begin <= value && value < end;
}));
#endif // ENABLE_SLOW_DCHECKS
}
#endif  // !defined(CPPGC_YOUNG_GENERATION)
} // namespace
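
As an aside on the TODO above: the two binary walks it refers to are the pair
of ordered-set lookups that delimit the doomed range. The function body is
elided between the hunks, so the following is an illustrative sketch, not
verbatim code from this CL:

  // std::set iterates in key order, so the slots in [begin, end) form a
  // contiguous run; two lower_bound walks delimit it and erase() drops it.
  auto from = remembered_slots.lower_bound(begin);
  auto to = remembered_slots.lower_bound(end);
  remembered_slots.erase(from, to);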
@@ -86,8 +87,13 @@ void ExplicitManagementImpl::FreeUnreferencedObject(HeapHandle& heap_handle,
// list entry.
}
}
#if defined(CPPGC_YOUNG_GENERATION)
auto& heap_base = HeapBase::From(heap_handle);
InvalidateRememberedSlotsInRange(
heap_base, object, reinterpret_cast<uint8_t*>(object) + object_size);
// If this object was registered as remembered, remove it.
heap_base.remembered_source_objects().erase(&header);
#endif // defined(CPPGC_YOUNG_GENERATION)
}
namespace {
@@ -136,8 +142,10 @@ bool Shrink(HeapObjectHeader& header, BasePage& base_page, size_t new_size,
NormalPage::From(&base_page)->object_start_bitmap().SetBit(free_start);
header.SetAllocatedSize(new_size);
}
#if defined(CPPGC_YOUNG_GENERATION)
InvalidateRememberedSlotsInRange(base_page.heap(), free_start,
free_start + size_delta);
#endif // defined(CPPGC_YOUNG_GENERATION)
// Return success in any case, as we want to avoid that embedders start
// copying memory because of small deltas.
return true;
@@ -137,6 +137,7 @@ void HeapBase::ResetRememberedSet() {
DCHECK(AllLABsAreEmpty(raw_heap()).value());
caged_heap().local_data().age_table.Reset(&caged_heap().allocator());
remembered_slots().clear();
remembered_source_objects().clear();
}
#endif // defined(CPPGC_YOUNG_GENERATION)
@@ -15,6 +15,7 @@
#include "src/base/macros.h"
#include "src/heap/cppgc/compactor.h"
#include "src/heap/cppgc/garbage-collector.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/metric-recorder.h"
#include "src/heap/cppgc/object-allocator.h"
@@ -163,6 +164,9 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
#if defined(CPPGC_YOUNG_GENERATION)
std::set<void*>& remembered_slots() { return remembered_slots_; }
std::set<HeapObjectHeader*>& remembered_source_objects() {
return remembered_source_objects_;
}
#endif // defined(CPPGC_YOUNG_GENERATION)
size_t ObjectPayloadSize() const;
@@ -260,6 +264,7 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
allocation_observer_for_PROCESS_HEAP_STATISTICS_;
#if defined(CPPGC_YOUNG_GENERATION)
std::set<void*> remembered_slots_;
std::set<HeapObjectHeader*> remembered_source_objects_;
#endif
size_t no_gc_scope_ = 0;
@@ -60,16 +60,42 @@ bool ExitIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
return false;
}
// Visits source objects that were recorded in the generational barrier.
void VisitRememberedObjects(HeapBase& heap, Visitor& visitor,
MutatorMarkingState& mutator_marking_state) {
#if defined(CPPGC_YOUNG_GENERATION)
for (HeapObjectHeader* source_hoh : heap.remembered_source_objects()) {
DCHECK(source_hoh);
// The age checking in the generational barrier is imprecise, since a card
// may have mixed young/old objects. Check here precisely if the object is
// old.
if (source_hoh->IsYoung()) continue;
// The design of young generation requires collections to be executed at the
// top level (with the guarantee that no objects are currently being in
// construction). This can be ensured by running young GCs from safe points
// or by reintroducing nested allocation scopes that avoid finalization.
DCHECK(!source_hoh->template IsInConstruction<AccessMode::kNonAtomic>());
const TraceCallback trace_callback =
GlobalGCInfoTable::GCInfoFromIndex(source_hoh->GetGCInfoIndex()).trace;
// Process eagerly to avoid reaccounting.
trace_callback(&visitor, source_hoh->ObjectStart());
}
#endif
}
// Visit remembered set that was recorded in the generational barrier.
void VisitRememberedSlots(HeapBase& heap,
MutatorMarkingState& mutator_marking_state) {
#if defined(CPPGC_YOUNG_GENERATION)
for (void* slot : heap.remembered_slots()) {
// Slot must always point to a valid, not freed object.
auto& slot_header = BasePage::FromInnerAddress(&heap, slot)
->ObjectHeaderFromInnerAddress(slot);
// The age checking in the generational barrier is imprecise, since a card
// may have mixed young/old objects. Check here precisely if the object is
// old.
if (slot_header.IsYoung()) continue;
// The design of young generation requires collections to be executed at the
// top level (with the guarantee that no objects are currently being in
@@ -386,7 +412,10 @@ void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) {
heap().stack()->IteratePointers(&stack_visitor());
}
if (config_.collection_type == MarkingConfig::CollectionType::kMinor) {
StatsCollector::EnabledScope stats_scope(
heap().stats_collector(), StatsCollector::kMarkVisitRememberedSets);
VisitRememberedSlots(heap(), mutator_marking_state_);
VisitRememberedObjects(heap(), visitor(), mutator_marking_state_);
}
}
@@ -140,6 +140,20 @@ void WriteBarrier::GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
// Record slot.
local_data.heap_base.remembered_slots().insert(const_cast<void*>(slot));
}
// static
void WriteBarrier::GenerationalBarrierForSourceObjectSlow(
const CagedHeapLocalData& local_data, const void* inner_pointer) {
DCHECK(inner_pointer);
auto& object_header =
BasePage::FromInnerAddress(&local_data.heap_base, inner_pointer)
->ObjectHeaderFromInnerAddress(inner_pointer);
// Record the source object.
local_data.heap_base.remembered_source_objects().emplace(
const_cast<HeapObjectHeader*>(&object_header));
}
#endif // CPPGC_YOUNG_GENERATION
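
Taken together with VisitRememberedObjects() in marker.cc above, the lifecycle
of a remembered source object in this CL is:

  // 1. Write: the inline fast path bails out for young cards; old cards reach
  //    GenerationalBarrierForSourceObjectSlow(), which resolves the enclosing
  //    HeapObjectHeader via BasePage::FromInnerAddress() and inserts it into
  //    heap_base.remembered_source_objects().
  // 2. Minor GC: VisitRememberedObjects() skips headers that are still young
  //    (the card check is imprecise) and eagerly retraces each old source
  //    object through its GCInfo trace callback, marking young targets.
  // 3. ResetRememberedSet() clears remembered_source_objects() at the end of
  //    the cycle.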
#if V8_ENABLE_CHECKS
@@ -4,6 +4,9 @@
#if defined(CPPGC_YOUNG_GENERATION)
#include <initializer_list>
#include <vector>
#include "include/cppgc/allocation.h"
#include "include/cppgc/explicit-management.h"
#include "include/cppgc/heap-consistency.h"
@@ -19,6 +22,88 @@ namespace internal {
namespace {
bool IsHeapObjectYoung(void* obj) {
return HeapObjectHeader::FromObject(obj).IsYoung();
}
bool IsHeapObjectOld(void* obj) { return !IsHeapObjectYoung(obj); }
struct ExpectRememberedSlotsAdded final {
ExpectRememberedSlotsAdded(
cppgc::Heap* heap,
std::initializer_list<void*> slots_expected_to_be_remembered)
: remembered_slots_(Heap::From(heap)->remembered_slots()),
slots_expected_to_be_remembered_(slots_expected_to_be_remembered),
initial_number_of_slots_(remembered_slots_.size()) {
// Check that the remembered set doesn't contain specified slots.
EXPECT_FALSE(std::includes(remembered_slots_.begin(),
remembered_slots_.end(),
slots_expected_to_be_remembered_.begin(),
slots_expected_to_be_remembered_.end()));
}
~ExpectRememberedSlotsAdded() {
const size_t current_number_of_slots = remembered_slots_.size();
EXPECT_EQ(
initial_number_of_slots_ + slots_expected_to_be_remembered_.size(),
current_number_of_slots);
EXPECT_TRUE(std::includes(remembered_slots_.begin(),
remembered_slots_.end(),
slots_expected_to_be_remembered_.begin(),
slots_expected_to_be_remembered_.end()));
}
private:
std::set<void*>& remembered_slots_;
std::set<void*> slots_expected_to_be_remembered_;
const size_t initial_number_of_slots_ = 0;
};
struct ExpectRememberedSlotsRemoved final {
ExpectRememberedSlotsRemoved(
cppgc::Heap* heap,
std::initializer_list<void*> slots_expected_to_be_removed)
: remembered_slots_(Heap::From(heap)->remembered_slots()),
slots_expected_to_be_removed_(slots_expected_to_be_removed),
initial_number_of_slots_(remembered_slots_.size()) {
DCHECK_GE(initial_number_of_slots_, slots_expected_to_be_removed_.size());
// Check that the remembered set does contain specified slots to be removed.
EXPECT_TRUE(std::includes(remembered_slots_.begin(),
remembered_slots_.end(),
slots_expected_to_be_removed_.begin(),
slots_expected_to_be_removed_.end()));
}
~ExpectRememberedSlotsRemoved() {
const size_t current_number_of_slots = remembered_slots_.size();
EXPECT_EQ(initial_number_of_slots_ - slots_expected_to_be_removed_.size(),
current_number_of_slots);
EXPECT_FALSE(std::includes(remembered_slots_.begin(),
remembered_slots_.end(),
slots_expected_to_be_removed_.begin(),
slots_expected_to_be_removed_.end()));
}
private:
std::set<void*>& remembered_slots_;
std::set<void*> slots_expected_to_be_removed_;
const size_t initial_number_of_slots_ = 0;
};
struct ExpectNoRememberedSlotsAdded final {
explicit ExpectNoRememberedSlotsAdded(cppgc::Heap* heap)
: remembered_slots_(Heap::From(heap)->remembered_slots()),
initial_remembered_slots_(remembered_slots_) {}
~ExpectNoRememberedSlotsAdded() {
EXPECT_EQ(initial_remembered_slots_, remembered_slots_);
}
private:
std::set<void*>& remembered_slots_;
std::set<void*> initial_remembered_slots_;
};
class SimpleGCedBase : public GarbageCollected<SimpleGCedBase> {
public:
static size_t destructed_objects;
@@ -78,6 +163,13 @@ class MinorGCTestForType : public MinorGCTest {
using Type = SmallOrLarge;
};
template <typename... Args>
void RunMinorGCAndExpectObjectsPromoted(MinorGCTest& test, Args*... args) {
([args] { EXPECT_TRUE(IsHeapObjectYoung(args)); }(), ...);
test.CollectMinor();
([args] { EXPECT_TRUE(IsHeapObjectOld(args)); }(), ...);
}
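
For readers unfamiliar with the comma folds above: each statement expands to
one immediately-invoked lambda per pack element, so for two pointers a and b
the first line is equivalent to:

  [a] { EXPECT_TRUE(IsHeapObjectYoung(a)); }();
  [b] { EXPECT_TRUE(IsHeapObjectYoung(b)); }();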
} // namespace
using ObjectTypes = ::testing::Types<Small, Large>;
@@ -228,16 +320,16 @@ TYPED_TEST(MinorGCTestForType, OmitGenerationalBarrierForSentinels) {
TestFixture::CollectMinor();
EXPECT_FALSE(HeapObjectHeader::FromObject(old.Get()).IsYoung());
{
ExpectNoRememberedSlotsAdded _(this->GetHeap());
// Try issuing generational barrier for nullptr.
old->next = static_cast<Type*>(nullptr);
}
{
ExpectNoRememberedSlotsAdded _(this->GetHeap());
// Try issuing generational barrier for sentinel.
old->next = static_cast<Type*>(kSentinelPointer);
}
}
template <typename From, typename To>
@@ -248,18 +340,20 @@ void TestRememberedSetInvalidation(MinorGCTest& test) {
auto* young = MakeGarbageCollected<To>(test.GetAllocationHandle());
{
ExpectRememberedSlotsAdded _(test.GetHeap(),
{old->next.GetSlotForTesting()});
// Issue the generational barrier.
old->next = young;
}
{
ExpectRememberedSlotsRemoved _(test.GetHeap(),
{old->next.GetSlotForTesting()});
// Release the persistent and free the old object.
auto* old_raw = old.Release();
subtle::FreeUnreferencedObject(test.GetHeapHandle(), *old_raw);
}
// Visiting remembered slots must not fail.
test.CollectMinor();
@@ -278,7 +372,7 @@ TEST_F(MinorGCTest, RememberedSetInvalidationOnShrink) {
static constexpr size_t kTrailingMembers = 64;
static constexpr size_t kBytesToAllocate = kTrailingMembers * sizeof(Member);
static constexpr size_t kFirstMemberToInvalidate = kTrailingMembers / 2;
static constexpr size_t kLastMemberToInvalidate = kTrailingMembers;
// Create an object with additional kBytesToAllocate bytes.
@@ -321,6 +415,111 @@ TEST_F(MinorGCTest, RememberedSetInvalidationOnShrink) {
CollectMinor();
}
namespace {
template <typename Value>
struct InlinedObject {
struct Inner {
Inner() = default;
explicit Inner(AllocationHandle& handle)
: ref(MakeGarbageCollected<Value>(handle)) {}
void Trace(Visitor* v) const { v->Trace(ref); }
double d = -1.;
Member<Value> ref;
};
InlinedObject() = default;
explicit InlinedObject(AllocationHandle& handle)
: ref(MakeGarbageCollected<Value>(handle)), inner(handle) {}
void Trace(cppgc::Visitor* v) const {
v->Trace(ref);
v->Trace(inner);
}
int a_ = -1;
Member<Value> ref;
Inner inner;
};
template <typename Value>
class GCedWithInlinedArray
: public GarbageCollected<GCedWithInlinedArray<Value>> {
public:
static constexpr size_t kNumObjects = 16;
GCedWithInlinedArray(HeapHandle& heap_handle, AllocationHandle& alloc_handle)
: heap_handle_(heap_handle), alloc_handle_(alloc_handle) {}
using WriteBarrierParams = subtle::HeapConsistency::WriteBarrierParams;
using HeapConsistency = subtle::HeapConsistency;
void SetInPlaceRange(size_t from, size_t to) {
DCHECK_GT(to, from);
DCHECK_GT(kNumObjects, from);
for (; from != to; ++from)
new (&objects[from]) InlinedObject<Value>(alloc_handle_);
GenerationalBarrierForSourceObject(&objects[from]);
}
void Trace(cppgc::Visitor* v) const {
for (const auto& object : objects) v->Trace(object);
}
InlinedObject<Value> objects[kNumObjects];
private:
void GenerationalBarrierForSourceObject(void* object) {
DCHECK(object);
WriteBarrierParams params;
const auto barrier_type = HeapConsistency::GetWriteBarrierType(
object, params, [this]() -> HeapHandle& { return heap_handle_; });
EXPECT_EQ(HeapConsistency::WriteBarrierType::kGenerational, barrier_type);
HeapConsistency::GenerationalBarrierForSourceObject(params, object);
}
HeapHandle& heap_handle_;
AllocationHandle& alloc_handle_;
};
} // namespace
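
Note a subtlety in SetInPlaceRange() above: after the loop, from == to, so the
barrier is issued with &objects[to], one element past the last constructed
entry. Since any inner pointer resolves to the same enclosing HeapObjectHeader
via BasePage::FromInnerAddress(), the recording still targets the right source
object for the range (2, 4) used below.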
TYPED_TEST(MinorGCTestForType, GenerationalBarrierDeferredTracing) {
using Type = typename TestFixture::Type;
Persistent<GCedWithInlinedArray<Type>> array =
MakeGarbageCollected<GCedWithInlinedArray<Type>>(
this->GetAllocationHandle(), this->GetHeapHandle(),
this->GetAllocationHandle());
this->CollectMinor();
EXPECT_TRUE(IsHeapObjectOld(array.Get()));
const auto& remembered_objects =
Heap::From(this->GetHeap())->remembered_source_objects();
{
ExpectNoRememberedSlotsAdded _(this->GetHeap());
EXPECT_EQ(0u, remembered_objects.count(
&HeapObjectHeader::FromObject(array->objects)));
array->SetInPlaceRange(2, 4);
EXPECT_EQ(1u, remembered_objects.count(
&HeapObjectHeader::FromObject(array->objects)));
}
RunMinorGCAndExpectObjectsPromoted(
*this, array->objects[2].ref.Get(), array->objects[2].inner.ref.Get(),
array->objects[3].ref.Get(), array->objects[3].inner.ref.Get());
EXPECT_EQ(0u, remembered_objects.size());
}
} // namespace internal
} // namespace cppgc