Commit 6f9df3e7 authored by Anton Bikineev, committed by V8 LUCI CQ

cppgc: young-gen: Support young generation with pointer compression

Currently, the young generation assumes that all on-heap pointers are
compressed, which is not yet the case due to the backing-store
pointers in Blink. Fixing the collections to use Member<> is not easy,
because inlined collections may rely on the invariant
  backing_pointer_ == inlined_buffer_;
where inlined_buffer_ can be off-heap (e.g. on the stack).

This CL introduces another type of barrier specifically for uncompressed
pointers. A follow-up will use that barrier from Blink.

Bug: chromium:1029379
Change-Id: If0f519220658268dbdf915235c2e5afd7887dc0c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3695358
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81105}
parent dfe65b90
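To make the problem in the description concrete, here is a minimal sketch (a hypothetical class, not actual Blink code) of the inline-buffer pattern that blocks compressing the backing-store pointer:

```cpp
#include <cstddef>

// A collection with an inline buffer. While the collection is small, the
// backing pointer aliases the inline buffer; for a stack-allocated
// collection that buffer lives outside the caged heap, so the pointer
// cannot be stored in compressed (Member<>) form.
template <typename T, std::size_t kInlineCapacity>
class InlinedVector {
 public:
  InlinedVector() : backing_(inline_buffer_) {}  // backing_ == inlined_buffer_

 private:
  T* backing_;  // Must stay a raw, uncompressed pointer.
  T inline_buffer_[kInlineCapacity];
};
```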
@@ -146,7 +146,25 @@ class HeapConsistency final {
    */
   static V8_INLINE void GenerationalBarrier(const WriteBarrierParams& params,
                                             const void* slot) {
-    internal::WriteBarrier::GenerationalBarrier(params, slot);
+    internal::WriteBarrier::GenerationalBarrier<
+        internal::WriteBarrier::GenerationalBarrierType::kPreciseSlot>(params,
+                                                                       slot);
   }
 
+  /**
+   * Generational barrier for maintaining consistency when running with
+   * multiple generations. This version is used when the slot contains an
+   * uncompressed pointer.
+   *
+   * \param params The parameters retrieved from `GetWriteBarrierType()`.
+   * \param uncompressed_slot Uncompressed slot containing the direct pointer
+   * to the object. The slot itself must reside in an object that has been
+   * allocated using `MakeGarbageCollected()`.
+   */
+  static V8_INLINE void GenerationalBarrierForUncompressedSlot(
+      const WriteBarrierParams& params, const void* uncompressed_slot) {
+    internal::WriteBarrier::GenerationalBarrier<
+        internal::WriteBarrier::GenerationalBarrierType::
+            kPreciseUncompressedSlot>(params, uncompressed_slot);
+  }
+
   /**
@@ -158,8 +176,9 @@ class HeapConsistency final {
    */
   static V8_INLINE void GenerationalBarrierForSourceObject(
       const WriteBarrierParams& params, const void* inner_pointer) {
-    internal::WriteBarrier::GenerationalBarrierForSourceObject(params,
-                                                               inner_pointer);
+    internal::WriteBarrier::GenerationalBarrier<
+        internal::WriteBarrier::GenerationalBarrierType::kImpreciseSlot>(
+        params, inner_pointer);
   }
 
  private:
......
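A sketch of how an embedder might drive the new API (the helper function is hypothetical; the `HeapConsistency` calls are the public cppgc interface shown in the hunk above):

```cpp
#include "cppgc/heap-consistency.h"

// Hypothetical write helper for a raw (uncompressed) backing-store pointer
// that lives in a garbage-collected object.
void WriteBackingStore(void** slot, void* value) {
  using HeapConsistency = cppgc::subtle::HeapConsistency;
  *slot = value;
  HeapConsistency::WriteBarrierParams params;
  switch (HeapConsistency::GetWriteBarrierType(slot, value, params)) {
    case HeapConsistency::WriteBarrierType::kGenerational:
      // The slot holds a full pointer, so the compressed-slot barrier must
      // not be used; record it through the uncompressed variant instead.
      HeapConsistency::GenerationalBarrierForUncompressedSlot(params, slot);
      break;
    case HeapConsistency::WriteBarrierType::kMarking:
      HeapConsistency::DijkstraWriteBarrier(params, value);
      break;
    case HeapConsistency::WriteBarrierType::kNone:
      break;
  }
}
```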
@@ -35,7 +35,8 @@ struct DijkstraWriteBarrierPolicy {
     WriteBarrier::Params params;
     switch (WriteBarrier::GetWriteBarrierType(slot, value, params)) {
       case WriteBarrier::Type::kGenerational:
-        WriteBarrier::GenerationalBarrier(params, slot);
+        WriteBarrier::GenerationalBarrier<
+            WriteBarrier::GenerationalBarrierType::kPreciseSlot>(params, slot);
         break;
       case WriteBarrier::Type::kMarking:
         WriteBarrier::DijkstraMarkingBarrier(params, value);
......
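For context, the policy above is what ordinary `Member<>` writes go through; a minimal sketch of such a write using the public cppgc API (class names are illustrative):

```cpp
#include <cppgc/garbage-collected.h>
#include <cppgc/member.h>
#include <cppgc/visitor.h>

struct Node final : cppgc::GarbageCollected<Node> {
  void Trace(cppgc::Visitor* visitor) const { visitor->Trace(next); }
  cppgc::Member<Node> next;
};

// Assigning through Member<> routes the store through
// DijkstraWriteBarrierPolicy::AssigningBarrier, which selects the
// generational barrier for old-to-young stores as in the switch above.
void Link(Node& parent, Node* child) { parent.next = child; }
```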
@@ -42,6 +42,12 @@ class V8_EXPORT WriteBarrier final {
     kGenerational,
   };
 
+  enum class GenerationalBarrierType : uint8_t {
+    kPreciseSlot,
+    kPreciseUncompressedSlot,
+    kImpreciseSlot,
+  };
+
   struct Params {
     HeapHandle* heap = nullptr;
 #if V8_ENABLE_CHECKS
@@ -76,15 +82,13 @@ class V8_EXPORT WriteBarrier final {
   static V8_INLINE void SteeleMarkingBarrier(const Params& params,
                                              const void* object);
 #if defined(CPPGC_YOUNG_GENERATION)
+  template <GenerationalBarrierType>
   static V8_INLINE void GenerationalBarrier(const Params& params,
                                             const void* slot);
-  static V8_INLINE void GenerationalBarrierForSourceObject(
-      const Params& params, const void* inner_pointer);
 #else  // !CPPGC_YOUNG_GENERATION
+  template <GenerationalBarrierType>
   static V8_INLINE void GenerationalBarrier(const Params& params,
-                                            const void* slot) {}
-  static V8_INLINE void GenerationalBarrierForSourceObject(
-      const Params& params, const void* inner_pointer) {}
+                                            const void* slot){};
 #endif  // CPPGC_YOUNG_GENERATION
 
 #if V8_ENABLE_CHECKS
@@ -123,6 +127,9 @@ class V8_EXPORT WriteBarrier final {
                                       const AgeTable& age_table,
                                       const void* slot, uintptr_t value_offset,
                                       HeapHandle* heap_handle);
+  static void GenerationalBarrierForUncompressedSlotSlow(
+      const CagedHeapLocalData& local_data, const AgeTable& age_table,
+      const void* slot, uintptr_t value_offset, HeapHandle* heap_handle);
   static void GenerationalBarrierForSourceObjectSlow(
       const CagedHeapLocalData& local_data, const void* object,
       HeapHandle* heap_handle);
@@ -388,40 +395,32 @@ void WriteBarrier::SteeleMarkingBarrier(const Params& params,
 }
 
 #if defined(CPPGC_YOUNG_GENERATION)
 // static
-void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
-  CheckParams(Type::kGenerational, params);
-
-  const CagedHeapLocalData& local_data = CagedHeapLocalData::Get();
-  const AgeTable& age_table = local_data.age_table;
-
-  // Bail out if the slot is in young generation.
-  if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
-    return;
-
-  // TODO(chromium:1029379): Consider reloading local_data in the slow path to
-  // reduce register pressure.
-  GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset,
-                          params.heap);
-}
-
-// static
-void WriteBarrier::GenerationalBarrierForSourceObject(
-    const Params& params, const void* inner_pointer) {
+template <WriteBarrier::GenerationalBarrierType type>
+void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
   CheckParams(Type::kGenerational, params);
 
   const CagedHeapLocalData& local_data = CagedHeapLocalData::Get();
   const AgeTable& age_table = local_data.age_table;
 
-  // Assume that if the first element is in young generation, the whole range
-  // is in young generation.
+  // Bail out if the slot (precise or imprecise) is in young generation.
   if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
     return;
 
+  // Dispatch between different types of barriers.
   // TODO(chromium:1029379): Consider reloading local_data in the slow path to
   // reduce register pressure.
-  GenerationalBarrierForSourceObjectSlow(local_data, inner_pointer,
-                                         params.heap);
+  if constexpr (type == GenerationalBarrierType::kPreciseSlot) {
+    GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset,
+                            params.heap);
+  } else if constexpr (type ==
+                       GenerationalBarrierType::kPreciseUncompressedSlot) {
+    GenerationalBarrierForUncompressedSlotSlow(
+        local_data, age_table, slot, params.value_offset, params.heap);
+  } else {
+    GenerationalBarrierForSourceObjectSlow(local_data, slot, params.heap);
+  }
 }
 
 #endif  // CPPGC_YOUNG_GENERATION
......
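The template parameter turns the barrier-type dispatch into a compile-time decision: every instantiation shares the age-table fast path, and the `if constexpr` chain collapses to a single slow-path call with no runtime branching. A self-contained sketch of the pattern (stub bodies stand in for the real slow paths):

```cpp
#include <cstdint>
#include <cstdio>

enum class GenerationalBarrierType : std::uint8_t {
  kPreciseSlot,
  kPreciseUncompressedSlot,
  kImpreciseSlot,
};

template <GenerationalBarrierType kType>
void GenerationalBarrier(const void* slot) {
  // The shared fast path (age-table check) would run here.
  if constexpr (kType == GenerationalBarrierType::kPreciseSlot) {
    std::printf("record compressed slot %p\n", slot);
  } else if constexpr (kType ==
                       GenerationalBarrierType::kPreciseUncompressedSlot) {
    std::printf("record uncompressed slot %p\n", slot);
  } else {
    std::printf("record object containing inner pointer %p\n", slot);
  }
}

int main() {
  int dummy = 0;
  GenerationalBarrier<GenerationalBarrierType::kPreciseSlot>(&dummy);
  GenerationalBarrier<GenerationalBarrierType::kPreciseUncompressedSlot>(&dummy);
  GenerationalBarrier<GenerationalBarrierType::kImpreciseSlot>(&dummy);
}
```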
@@ -18,7 +18,32 @@ namespace internal {
 
 namespace {
 
+enum class SlotType { kCompressed, kUncompressed };
+
+template <SlotType slot_type>
+void InvalidateRememberedSlots(std::set<void*>& slots, void* begin, void* end) {
+  // TODO(1029379): The 2 binary walks can be optimized with a custom algorithm.
+  auto from = slots.lower_bound(begin), to = slots.lower_bound(end);
+  slots.erase(from, to);
+#if defined(ENABLE_SLOW_DCHECKS)
+  // Check that no remembered slots are referring to the freed area.
+  DCHECK(std::none_of(slots.begin(), slots.end(), [begin, end](void* slot) {
+    void* value = nullptr;
+#if defined(CPPGC_POINTER_COMPRESSION)
+    if constexpr (slot_type == SlotType::kCompressed)
+      value = CompressedPointer::Decompress(*reinterpret_cast<uint32_t*>(slot));
+    else
+      value = *reinterpret_cast<void**>(slot);
+#else   // !defined(CPPGC_POINTER_COMPRESSION)
+    value = *reinterpret_cast<void**>(slot);
+#endif  // !defined(CPPGC_POINTER_COMPRESSION)
+    return begin <= value && value < end;
+  }));
+#endif  // defined(ENABLE_SLOW_DCHECKS)
+}
+
 // Visit remembered set that was recorded in the generational barrier.
+template <SlotType slot_type>
 void VisitRememberedSlots(const std::set<void*>& slots, const HeapBase& heap,
                           MutatorMarkingState& mutator_marking_state) {
   for (void* slot : slots) {
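`InvalidateRememberedSlots` drops every recorded slot that falls inside a freed `[begin, end)` range using two `lower_bound` lookups on the ordered set. A standalone sketch of that idiom:

```cpp
#include <cassert>
#include <set>

int main() {
  char arena[4][8];  // Four ascending addresses stand in for recorded slots.
  std::set<void*> slots;
  for (auto& row : arena) slots.insert(row);

  // Invalidate slots in the half-open freed range [arena[1], arena[3]).
  void* begin = arena[1];
  void* end = arena[3];
  auto from = slots.lower_bound(begin), to = slots.lower_bound(end);
  slots.erase(from, to);

  assert(slots.count(arena[0]) == 1);  // Below the range: kept.
  assert(slots.count(arena[1]) == 0);  // Inside: erased.
  assert(slots.count(arena[2]) == 0);  // Inside: erased.
  assert(slots.count(arena[3]) == 1);  // At `end` (exclusive): kept.
}
```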
@@ -36,9 +61,12 @@ void VisitRememberedSlots(const std::set<void*>& slots, const HeapBase& heap,
     DCHECK(!slot_header.template IsInConstruction<AccessMode::kNonAtomic>());
 #if defined(CPPGC_POINTER_COMPRESSION)
     // Transform slot.
-    void* value =
-        CompressedPointer::Decompress(*reinterpret_cast<uint32_t*>(slot));
+    void* value = nullptr;
+    if constexpr (slot_type == SlotType::kCompressed) {
+      value = CompressedPointer::Decompress(*reinterpret_cast<uint32_t*>(slot));
+    } else {
+      value = *reinterpret_cast<void**>(slot);
+    }
 #else   // !defined(CPPGC_POINTER_COMPRESSION)
     void* value = *reinterpret_cast<void**>(slot);
 #endif  // !defined(CPPGC_POINTER_COMPRESSION)
@@ -89,6 +117,11 @@ void OldToNewRememberedSet::AddSlot(void* slot) {
   remembered_slots_.insert(slot);
 }
 
+void OldToNewRememberedSet::AddUncompressedSlot(void* uncompressed_slot) {
+  DCHECK(heap_.generational_gc_supported());
+  remembered_uncompressed_slots_.insert(uncompressed_slot);
+}
+
 void OldToNewRememberedSet::AddSourceObject(HeapObjectHeader& hoh) {
   DCHECK(heap_.generational_gc_supported());
   remembered_source_objects_.insert(&hoh);
@@ -105,18 +138,10 @@ void OldToNewRememberedSet::AddWeakCallback(WeakCallbackItem item) {
 void OldToNewRememberedSet::InvalidateRememberedSlotsInRange(void* begin,
                                                              void* end) {
   DCHECK(heap_.generational_gc_supported());
-  // TODO(1029379): The 2 binary walks can be optimized with a custom algorithm.
-  auto from = remembered_slots_.lower_bound(begin),
-       to = remembered_slots_.lower_bound(end);
-  remembered_slots_.erase(from, to);
-#if defined(ENABLE_SLOW_DCHECKS)
-  // Check that no remembered slots are referring to the freed area.
-  DCHECK(std::none_of(remembered_slots_.begin(), remembered_slots_.end(),
-                      [begin, end](void* slot) {
-                        void* value = *reinterpret_cast<void**>(slot);
-                        return begin <= value && value < end;
-                      }));
-#endif  // defined(ENABLE_SLOW_DCHECKS)
+  InvalidateRememberedSlots<SlotType::kCompressed>(remembered_slots_, begin,
+                                                   end);
+  InvalidateRememberedSlots<SlotType::kUncompressed>(
+      remembered_uncompressed_slots_, begin, end);
 }
 
 void OldToNewRememberedSet::InvalidateRememberedSourceObject(
@@ -128,7 +153,10 @@ void OldToNewRememberedSet::InvalidateRememberedSourceObject(
 void OldToNewRememberedSet::Visit(Visitor& visitor,
                                   MutatorMarkingState& marking_state) {
   DCHECK(heap_.generational_gc_supported());
-  VisitRememberedSlots(remembered_slots_, heap_, marking_state);
+  VisitRememberedSlots<SlotType::kCompressed>(remembered_slots_, heap_,
+                                              marking_state);
+  VisitRememberedSlots<SlotType::kUncompressed>(remembered_uncompressed_slots_,
+                                                heap_, marking_state);
   VisitRememberedSourceObjects(remembered_source_objects_, visitor);
 }
@@ -147,11 +175,13 @@ void OldToNewRememberedSet::ReleaseCustomCallbacks() {
 void OldToNewRememberedSet::Reset() {
   DCHECK(heap_.generational_gc_supported());
   remembered_slots_.clear();
+  remembered_uncompressed_slots_.clear();
   remembered_source_objects_.clear();
 }
 
 bool OldToNewRememberedSet::IsEmpty() const {
-  return remembered_slots_.empty() && remembered_source_objects_.empty() &&
+  return remembered_slots_.empty() && remembered_uncompressed_slots_.empty() &&
+         remembered_source_objects_.empty() &&
          remembered_weak_callbacks_.empty();
 }
......
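The reason the remembered set now keeps two slot sets: a compressed slot stores only an in-cage 32-bit representation that must be decompressed against the cage base before the referenced value can be inspected, while an uncompressed slot stores the full pointer. A simplified model of the (de)compression, assuming a 4GB-aligned cage (the real cppgc scheme differs in details such as shifted storage and sign extension):

```cpp
#include <cassert>
#include <cstdint>

// Hypothetical 4GB-aligned cage base; all in-cage addresses share its high
// bits, so the low 32 bits identify an object within the cage.
constexpr std::uintptr_t kCageBase = std::uintptr_t{0x4000} << 32;

std::uint32_t Compress(void* ptr) {
  // Keep only the offset within the cage.
  return static_cast<std::uint32_t>(reinterpret_cast<std::uintptr_t>(ptr));
}

void* Decompress(std::uint32_t compressed) {
  // Reattach the cage base to recover the full pointer.
  return reinterpret_cast<void*>(kCageBase | compressed);
}

int main() {
  void* in_cage = reinterpret_cast<void*>(kCageBase + 0x1234);
  assert(Decompress(Compress(in_cage)) == in_cage);
}
```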
@@ -32,6 +32,7 @@ class V8_EXPORT_PRIVATE OldToNewRememberedSet final {
   OldToNewRememberedSet& operator=(const OldToNewRememberedSet&) = delete;
 
   void AddSlot(void* slot);
+  void AddUncompressedSlot(void* slot);
   void AddSourceObject(HeapObjectHeader& source_hoh);
   void AddWeakCallback(WeakCallbackItem);
@@ -59,6 +60,7 @@ class V8_EXPORT_PRIVATE OldToNewRememberedSet final {
   const HeapBase& heap_;
   std::set<void*> remembered_slots_;
+  std::set<void*> remembered_uncompressed_slots_;
   std::set<HeapObjectHeader*> remembered_source_objects_;
   std::set<WeakCallbackItem, decltype(compare_parameter)>
       remembered_weak_callbacks_;
......
@@ -141,6 +141,26 @@ void WriteBarrier::GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
   heap.remembered_set().AddSlot((const_cast<void*>(slot)));
 }
 
+// static
+void WriteBarrier::GenerationalBarrierForUncompressedSlotSlow(
+    const CagedHeapLocalData& local_data, const AgeTable& age_table,
+    const void* slot, uintptr_t value_offset, HeapHandle* heap_handle) {
+  DCHECK(slot);
+  DCHECK(heap_handle);
+  DCHECK_GT(kCagedHeapReservationSize, value_offset);
+
+  // A write during atomic pause (e.g. pre-finalizer) may trigger the slow path
+  // of the barrier. This is a result of the order of bailouts where not marking
+  // results in applying the generational barrier.
+  auto& heap = HeapBase::From(*heap_handle);
+  if (heap.in_atomic_pause()) return;
+
+  if (value_offset > 0 && age_table.GetAge(value_offset) == AgeTable::Age::kOld)
+    return;
+
+  // Record slot.
+  heap.remembered_set().AddUncompressedSlot((const_cast<void*>(slot)));
+}
+
 // static
 void WriteBarrier::GenerationalBarrierForSourceObjectSlow(
     const CagedHeapLocalData& local_data, const void* inner_pointer,
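The slow path above bails out when the stored value is old, because old-to-old references need no remembered-set entry, and skips recording entirely during the atomic pause. A simplified model of the age lookup, with illustrative card and cage sizes (the real AgeTable constants differ):

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

enum class Age : std::uint8_t { kOld, kYoung };

constexpr std::size_t kCardSize = 4096;                   // Illustrative.
constexpr std::size_t kCageSize = std::size_t{64} << 20;  // 64MB model cage.

// One age entry per card; GetAge maps a cage offset to its card's age.
class AgeTable {
 public:
  AgeTable() : ages_(kCageSize / kCardSize, Age::kOld) {}
  void SetAge(std::size_t offset, Age age) { ages_[offset / kCardSize] = age; }
  Age GetAge(std::size_t offset) const { return ages_[offset / kCardSize]; }

 private:
  std::vector<Age> ages_;
};

int main() {
  AgeTable table;
  table.SetAge(5 * kCardSize, Age::kYoung);
  // A store whose value lands on an old card is not recorded by the barrier.
  assert(table.GetAge(5 * kCardSize + 100) == Age::kYoung);
  assert(table.GetAge(7 * kCardSize) == Age::kOld);
}
```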