Commit ef0fbafd authored by Anton Bikineev, committed by V8 LUCI CQ

cppgc: young-gen: Introduce OldToNewRememberedSet

This CL refactors the remembered-set logic in heap-base and
explicit-management into a new class, OldToNewRememberedSet.

Bug: chromium:1029379
Change-Id: Id032b9dcc01af6f9bb9e546ed9bc6324da6d9b66
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3472498
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79212}
parent a7a996ab
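For orientation before the diff: call sites below stop touching heap_base.remembered_slots() and remembered_source_objects() directly and instead go through a single OldToNewRememberedSet (AddSlot, AddSourceObject, InvalidateRememberedSlotsInRange, Visit, Reset). The snippet that follows is only a minimal, self-contained sketch of that interface shape; OldToNewRememberedSetSketch, its main() driver, and the char buffer standing in for a heap object are illustrative assumptions, not cppgc code (HeapObjectHeader, Visitor and MutatorMarkingState are deliberately left out).

// Minimal stand-in sketch (not the cppgc class): an ordered set of slots with
// range invalidation, mirroring the interface introduced in this CL.
#include <cassert>
#include <cstddef>
#include <set>

class OldToNewRememberedSetSketch {
 public:
  void AddSlot(void* slot) { remembered_slots_.insert(slot); }

  // Drop every remembered slot whose address lies in [begin, end), e.g. when
  // the object covering that range is explicitly freed or shrunk.
  void InvalidateRememberedSlotsInRange(void* begin, void* end) {
    remembered_slots_.erase(remembered_slots_.lower_bound(begin),
                            remembered_slots_.lower_bound(end));
  }

  void Reset() { remembered_slots_.clear(); }

  std::size_t size() const { return remembered_slots_.size(); }

 private:
  std::set<void*> remembered_slots_;  // ordered, so the range erase is cheap
};

int main() {
  OldToNewRememberedSetSketch set;
  alignas(void*) char object[64] = {};  // stand-in for an old heap object
  set.AddSlot(object);
  set.AddSlot(object + 8);
  assert(set.size() == 2u);
  // Freeing the object invalidates every slot recorded inside it.
  set.InvalidateRememberedSlotsInRange(object, object + sizeof object);
  assert(set.size() == 0u);
  return 0;
}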
......@@ -2899,6 +2899,8 @@ filegroup(
"src/heap/cppgc/process-heap-statistics.h",
"src/heap/cppgc/raw-heap.cc",
"src/heap/cppgc/raw-heap.h",
"src/heap/cppgc/remembered-set.cc"
"src/heap/cppgc/remembered-set.h"
"src/heap/cppgc/source-location.cc",
"src/heap/cppgc/stats-collector.cc",
"src/heap/cppgc/stats-collector.h",
......
......@@ -5584,6 +5584,8 @@ v8_source_set("cppgc_base") {
"src/heap/cppgc/process-heap.h",
"src/heap/cppgc/raw-heap.cc",
"src/heap/cppgc/raw-heap.h",
"src/heap/cppgc/remembered-set.cc",
"src/heap/cppgc/remembered-set.h",
"src/heap/cppgc/source-location.cc",
"src/heap/cppgc/stats-collector.cc",
"src/heap/cppgc/stats-collector.h",
......
......@@ -25,27 +25,6 @@ bool InGC(HeapHandle& heap_handle) {
heap.sweeper().IsSweepingInProgress();
}
#if defined(CPPGC_YOUNG_GENERATION)
void InvalidateRememberedSlotsInRange(HeapBase& heap, void* begin, void* end) {
// Invalidate slots from |remembered_slots| that reside within |begin| and
// |end|.
auto& remembered_slots = heap.remembered_slots();
// TODO(bikineev): The 2 binary walks can be optimized with a custom
// algorithm.
auto from = remembered_slots.lower_bound(begin),
to = remembered_slots.lower_bound(end);
remembered_slots.erase(from, to);
#ifdef ENABLE_SLOW_DCHECKS
// Check that no remembered slots are referring to the freed area.
DCHECK(std::none_of(remembered_slots.begin(), remembered_slots.end(),
[begin, end](void* slot) {
void* value = *reinterpret_cast<void**>(slot);
return begin <= value && value < end;
}));
#endif // ENABLE_SLOW_DCHECKS
}
#endif // defined(CPPGC_YOUNG_GENERATION)
} // namespace
void ExplicitManagementImpl::FreeUnreferencedObject(HeapHandle& heap_handle,
......@@ -89,10 +68,10 @@ void ExplicitManagementImpl::FreeUnreferencedObject(HeapHandle& heap_handle,
}
#if defined(CPPGC_YOUNG_GENERATION)
auto& heap_base = HeapBase::From(heap_handle);
InvalidateRememberedSlotsInRange(
heap_base, object, reinterpret_cast<uint8_t*>(object) + object_size);
heap_base.remembered_set().InvalidateRememberedSlotsInRange(
object, reinterpret_cast<uint8_t*>(object) + object_size);
// If this object was registered as remembered, remove it.
heap_base.remembered_source_objects().erase(&header);
heap_base.remembered_set().InvalidateRememberedSourceObject(header);
#endif // defined(CPPGC_YOUNG_GENERATION)
}
......@@ -143,8 +122,8 @@ bool Shrink(HeapObjectHeader& header, BasePage& base_page, size_t new_size,
header.SetAllocatedSize(new_size);
}
#if defined(CPPGC_YOUNG_GENERATION)
InvalidateRememberedSlotsInRange(base_page.heap(), free_start,
free_start + size_delta);
base_page.heap().remembered_set().InvalidateRememberedSlotsInRange(
free_start, free_start + size_delta);
#endif // defined(CPPGC_YOUNG_GENERATION)
// Return success in any case, as we want to avoid that embedders start
// copying memory because of small deltas.
......
......@@ -82,6 +82,9 @@ HeapBase::HeapBase(
weak_persistent_region_(*oom_handler_.get()),
strong_cross_thread_persistent_region_(*oom_handler_.get()),
weak_cross_thread_persistent_region_(*oom_handler_.get()),
#if defined(CPPGC_YOUNG_GENERATION)
remembered_set_(*this),
#endif // defined(CPPGC_YOUNG_GENERATION)
stack_support_(stack_support),
marking_support_(marking_support),
sweeping_support_(sweeping_support) {
......@@ -136,8 +139,7 @@ void HeapBase::ResetRememberedSet() {
};
DCHECK(AllLABsAreEmpty(raw_heap()).value());
caged_heap().local_data().age_table.Reset(&caged_heap().allocator());
remembered_slots().clear();
remembered_source_objects().clear();
remembered_set_.Reset();
}
#endif // defined(CPPGC_YOUNG_GENERATION)
......
......@@ -30,6 +30,10 @@
#include "src/heap/cppgc/caged-heap.h"
#endif
#if defined(CPPGC_YOUNG_GENERATION)
#include "src/heap/cppgc/remembered-set.h"
#endif
namespace v8 {
namespace base {
class LsanPageAllocator;
......@@ -163,10 +167,7 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
}
#if defined(CPPGC_YOUNG_GENERATION)
std::set<void*>& remembered_slots() { return remembered_slots_; }
std::set<HeapObjectHeader*>& remembered_source_objects() {
return remembered_source_objects_;
}
OldToNewRememberedSet& remembered_set() { return remembered_set_; }
#endif // defined(CPPGC_YOUNG_GENERATION)
size_t ObjectPayloadSize() const;
......@@ -263,9 +264,8 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
ProcessHeapStatisticsUpdater::AllocationObserverImpl
allocation_observer_for_PROCESS_HEAP_STATISTICS_;
#if defined(CPPGC_YOUNG_GENERATION)
std::set<void*> remembered_slots_;
std::set<HeapObjectHeader*> remembered_source_objects_;
#endif
OldToNewRememberedSet remembered_set_;
#endif // defined(CPPGC_YOUNG_GENERATION)
size_t no_gc_scope_ = 0;
size_t disallow_gc_scope_ = 0;
......
......@@ -60,65 +60,6 @@ bool ExitIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
return false;
}
// Visits ranges that were recorded in the generational barrier for ranges.
void VisitRememberedObjects(HeapBase& heap, Visitor& visitor,
MutatorMarkingState& mutator_marking_state) {
#if defined(CPPGC_YOUNG_GENERATION)
for (HeapObjectHeader* source_hoh : heap.remembered_source_objects()) {
DCHECK(source_hoh);
// The age checking in the generational barrier is imprecise, since a card
// may have mixed young/old objects. Check here precisely if the object is
// old.
if (source_hoh->IsYoung()) continue;
// The design of young generation requires collections to be executed at the
// top level (with the guarantee that no objects are currently being in
// construction). This can be ensured by running young GCs from safe points
// or by reintroducing nested allocation scopes that avoid finalization.
DCHECK(!source_hoh->template IsInConstruction<AccessMode::kNonAtomic>());
const TraceCallback trace_callback =
GlobalGCInfoTable::GCInfoFromIndex(source_hoh->GetGCInfoIndex()).trace;
// Process eagerly to avoid reaccounting.
trace_callback(&visitor, source_hoh->ObjectStart());
}
#endif
}
// Visit remembered set that was recorded in the generational barrier.
void VisitRememberedSlots(HeapBase& heap,
MutatorMarkingState& mutator_marking_state) {
#if defined(CPPGC_YOUNG_GENERATION)
for (void* slot : heap.remembered_slots()) {
// Slot must always point to a valid, not freed object.
auto& slot_header = BasePage::FromInnerAddress(&heap, slot)
->ObjectHeaderFromInnerAddress(slot);
// The age checking in the generational barrier is imprecise, since a card
// may have mixed young/old objects. Check here precisely if the object is
// old.
if (slot_header.IsYoung()) continue;
// The design of young generation requires collections to be executed at the
// top level (with the guarantee that no objects are currently being in
// construction). This can be ensured by running young GCs from safe points
// or by reintroducing nested allocation scopes that avoid finalization.
DCHECK(!slot_header.template IsInConstruction<AccessMode::kNonAtomic>());
void* value = *reinterpret_cast<void**>(slot);
// Slot could be updated to nullptr or kSentinelPointer by the mutator.
if (value == kSentinelPointer || value == nullptr) continue;
#if DEBUG
// Check that the slot can not point to a freed object.
HeapObjectHeader& header =
BasePage::FromPayload(value)->ObjectHeaderFromInnerAddress(value);
DCHECK(!header.IsFree());
#endif
mutator_marking_state.DynamicallyMarkAddress(static_cast<Address>(value));
}
#endif
}
static constexpr size_t kDefaultDeadlineCheckInterval = 150u;
template <size_t kDeadlineCheckInterval = kDefaultDeadlineCheckInterval,
......@@ -411,12 +352,13 @@ void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) {
heap().stats_collector(), StatsCollector::kMarkVisitStack);
heap().stack()->IteratePointers(&stack_visitor());
}
#if defined(CPPGC_YOUNG_GENERATION)
if (config_.collection_type == MarkingConfig::CollectionType::kMinor) {
StatsCollector::EnabledScope stats_scope(
heap().stats_collector(), StatsCollector::kMarkVisitRememberedSets);
VisitRememberedSlots(heap(), mutator_marking_state_);
VisitRememberedObjects(heap(), visitor(), mutator_marking_state_);
heap().remembered_set().Visit(visitor(), mutator_marking_state_);
}
#endif // defined(CPPGC_YOUNG_GENERATION)
}
bool MarkerBase::VisitCrossThreadPersistentsIfNeeded() {
......
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/remembered-set.h"
#include <algorithm>
#include "include/cppgc/visitor.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/marking-state.h"
namespace cppgc {
namespace internal {
namespace {
// Visit remembered set that was recorded in the generational barrier.
void VisitRememberedSlots(const std::set<void*>& slots, const HeapBase& heap,
MutatorMarkingState& mutator_marking_state) {
for (void* slot : slots) {
// Slot must always point to a valid, not freed object.
auto& slot_header = BasePage::FromInnerAddress(&heap, slot)
->ObjectHeaderFromInnerAddress(slot);
// The age checking in the generational barrier is imprecise, since a card
// may have mixed young/old objects. Check here precisely if the object is
// old.
if (slot_header.IsYoung()) continue;
// The design of the young generation requires collections to be executed at
// the top level (with the guarantee that no objects are currently in
// construction). This can be ensured by running young GCs from safe points
// or by reintroducing nested allocation scopes that avoid finalization.
DCHECK(!slot_header.template IsInConstruction<AccessMode::kNonAtomic>());
void* value = *reinterpret_cast<void**>(slot);
// Slot could be updated to nullptr or kSentinelPointer by the mutator.
if (value == kSentinelPointer || value == nullptr) continue;
#if DEBUG
// Check that the slot can not point to a freed object.
HeapObjectHeader& header =
BasePage::FromPayload(value)->ObjectHeaderFromInnerAddress(value);
DCHECK(!header.IsFree());
#endif
mutator_marking_state.DynamicallyMarkAddress(static_cast<Address>(value));
}
}
// Visits source objects that were recorded in the generational barrier for
// ranges.
void VisitRememberedSourceObjects(
const std::set<HeapObjectHeader*>& remembered_source_objects,
Visitor& visitor) {
for (HeapObjectHeader* source_hoh : remembered_source_objects) {
DCHECK(source_hoh);
// The age checking in the generational barrier is imprecise, since a card
// may have mixed young/old objects. Check here precisely if the object is
// old.
if (source_hoh->IsYoung()) continue;
// The design of the young generation requires collections to be executed at
// the top level (with the guarantee that no objects are currently in
// construction). This can be ensured by running young GCs from safe points
// or by reintroducing nested allocation scopes that avoid finalization.
DCHECK(!source_hoh->template IsInConstruction<AccessMode::kNonAtomic>());
const TraceCallback trace_callback =
GlobalGCInfoTable::GCInfoFromIndex(source_hoh->GetGCInfoIndex()).trace;
// Process eagerly to avoid reaccounting.
trace_callback(&visitor, source_hoh->ObjectStart());
}
}
} // namespace
void OldToNewRememberedSet::AddSlot(void* slot) {
remembered_slots_.insert(slot);
}
void OldToNewRememberedSet::AddSourceObject(HeapObjectHeader& hoh) {
remembered_source_objects_.insert(&hoh);
}
void OldToNewRememberedSet::InvalidateRememberedSlotsInRange(void* begin,
void* end) {
// TODO(1029379): The 2 binary walks can be optimized with a custom algorithm.
auto from = remembered_slots_.lower_bound(begin),
to = remembered_slots_.lower_bound(end);
remembered_slots_.erase(from, to);
#if defined(ENABLE_SLOW_DCHECKS)
// Check that no remembered slots are referring to the freed area.
DCHECK(std::none_of(remembered_slots_.begin(), remembered_slots_.end(),
[begin, end](void* slot) {
void* value = *reinterpret_cast<void**>(slot);
return begin <= value && value < end;
}));
#endif // defined(ENABLE_SLOW_DCHECKS)
}
void OldToNewRememberedSet::InvalidateRememberedSourceObject(
HeapObjectHeader& header) {
remembered_source_objects_.erase(&header);
}
void OldToNewRememberedSet::Visit(Visitor& visitor,
MutatorMarkingState& marking_state) {
VisitRememberedSlots(remembered_slots_, heap_, marking_state);
VisitRememberedSourceObjects(remembered_source_objects_, visitor);
}
void OldToNewRememberedSet::Reset() {
remembered_slots_.clear();
remembered_source_objects_.clear();
}
} // namespace internal
} // namespace cppgc
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_REMEMBERED_SET_H_
#define V8_HEAP_CPPGC_REMEMBERED_SET_H_
#include <set>
#include "src/base/macros.h"
namespace cppgc {
class Visitor;
namespace internal {
class HeapBase;
class HeapObjectHeader;
class MutatorMarkingState;
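// Stores the old-to-new references (slots and source objects) recorded by the
// generational write barrier; visited during minor GC marking and cleared via
// Reset().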
class V8_EXPORT_PRIVATE OldToNewRememberedSet final {
public:
explicit OldToNewRememberedSet(const HeapBase& heap) : heap_(heap) {}
OldToNewRememberedSet(const OldToNewRememberedSet&) = delete;
OldToNewRememberedSet& operator=(const OldToNewRememberedSet&) = delete;
void AddSlot(void* slot);
void AddSourceObject(HeapObjectHeader& source_hoh);
void InvalidateRememberedSlotsInRange(void* begin, void* end);
void InvalidateRememberedSourceObject(HeapObjectHeader& source_hoh);
void Visit(Visitor&, MutatorMarkingState&);
void Reset();
private:
friend class MinorGCTest;
const HeapBase& heap_;
std::set<void*> remembered_slots_;
std::set<HeapObjectHeader*> remembered_source_objects_;
};
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_REMEMBERED_SET_H_
......@@ -137,8 +137,9 @@ void WriteBarrier::GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
if (value_offset > 0 && age_table[value_offset] == AgeTable::Age::kOld)
return;
// Record slot.
local_data.heap_base.remembered_slots().insert(const_cast<void*>(slot));
local_data.heap_base.remembered_set().AddSlot(const_cast<void*>(slot));
}
// static
......@@ -151,8 +152,8 @@ void WriteBarrier::GenerationalBarrierForSourceObjectSlow(
->ObjectHeaderFromInnerAddress(inner_pointer);
// Record the source object.
local_data.heap_base.remembered_source_objects().emplace(
const_cast<HeapObjectHeader*>(&object_header));
local_data.heap_base.remembered_set().AddSourceObject(
const_cast<HeapObjectHeader&>(object_header));
}
#endif // CPPGC_YOUNG_GENERATION
......
......@@ -28,11 +28,91 @@ bool IsHeapObjectYoung(void* obj) {
bool IsHeapObjectOld(void* obj) { return !IsHeapObjectYoung(obj); }
class SimpleGCedBase : public GarbageCollected<SimpleGCedBase> {
public:
static size_t destructed_objects;
virtual ~SimpleGCedBase() { ++destructed_objects; }
virtual void Trace(Visitor* v) const { v->Trace(next); }
Member<SimpleGCedBase> next;
};
size_t SimpleGCedBase::destructed_objects;
template <size_t Size>
class SimpleGCed : public SimpleGCedBase {
char array[Size];
};
using Small = SimpleGCed<64>;
using Large = SimpleGCed<kLargeObjectSizeThreshold * 2>;
template <typename Type>
struct OtherType;
template <>
struct OtherType<Small> {
using Type = Large;
};
template <>
struct OtherType<Large> {
using Type = Small;
};
} // namespace
class MinorGCTest : public testing::TestWithHeap {
public:
MinorGCTest() {
CollectMajor();
SimpleGCedBase::destructed_objects = 0;
}
static size_t DestructedObjects() {
return SimpleGCedBase::destructed_objects;
}
void CollectMinor() {
Heap::From(GetHeap())->CollectGarbage(
Heap::Config::MinorPreciseAtomicConfig());
}
void CollectMajor() {
Heap::From(GetHeap())->CollectGarbage(Heap::Config::PreciseAtomicConfig());
}
const auto& RememberedSlots() const {
return Heap::From(GetHeap())->remembered_set().remembered_slots_;
}
const auto& RememberedSourceObjects() const {
return Heap::From(GetHeap())->remembered_set().remembered_source_objects_;
}
};
template <typename SmallOrLarge>
class MinorGCTestForType : public MinorGCTest {
public:
using Type = SmallOrLarge;
};
using ObjectTypes = ::testing::Types<Small, Large>;
TYPED_TEST_SUITE(MinorGCTestForType, ObjectTypes);
namespace {
template <typename... Args>
void RunMinorGCAndExpectObjectsPromoted(MinorGCTest& test, Args*... args) {
([args] { EXPECT_TRUE(IsHeapObjectYoung(args)); }(), ...);
test.CollectMinor();
([args] { EXPECT_TRUE(IsHeapObjectOld(args)); }(), ...);
}
struct ExpectRememberedSlotsAdded final {
ExpectRememberedSlotsAdded(
cppgc::Heap* heap,
const MinorGCTest& test,
std::initializer_list<void*> slots_expected_to_be_remembered)
: remembered_slots_(Heap::From(heap)->remembered_slots()),
: remembered_slots_(test.RememberedSlots()),
slots_expected_to_be_remembered_(slots_expected_to_be_remembered),
initial_number_of_slots_(remembered_slots_.size()) {
// Check that the remembered set doesn't contain specified slots.
......@@ -54,16 +134,16 @@ struct ExpectRememberedSlotsAdded final {
}
private:
std::set<void*>& remembered_slots_;
const std::set<void*>& remembered_slots_;
std::set<void*> slots_expected_to_be_remembered_;
const size_t initial_number_of_slots_ = 0;
};
struct ExpectRememberedSlotsRemoved final {
ExpectRememberedSlotsRemoved(
cppgc::Heap* heap,
const MinorGCTest& test,
std::initializer_list<void*> slots_expected_to_be_removed)
: remembered_slots_(Heap::From(heap)->remembered_slots()),
: remembered_slots_(test.RememberedSlots()),
slots_expected_to_be_removed_(slots_expected_to_be_removed),
initial_number_of_slots_(remembered_slots_.size()) {
DCHECK_GE(initial_number_of_slots_, slots_expected_to_be_removed_.size());
......@@ -85,14 +165,14 @@ struct ExpectRememberedSlotsRemoved final {
}
private:
std::set<void*>& remembered_slots_;
const std::set<void*>& remembered_slots_;
std::set<void*> slots_expected_to_be_removed_;
const size_t initial_number_of_slots_ = 0;
};
struct ExpectNoRememberedSlotsAdded final {
explicit ExpectNoRememberedSlotsAdded(cppgc::Heap* heap)
: remembered_slots_(Heap::From(heap)->remembered_slots()),
explicit ExpectNoRememberedSlotsAdded(const MinorGCTest& test)
: remembered_slots_(test.RememberedSlots()),
initial_remembered_slots_(remembered_slots_) {}
~ExpectNoRememberedSlotsAdded() {
......@@ -100,81 +180,12 @@ struct ExpectNoRememberedSlotsAdded final {
}
private:
std::set<void*>& remembered_slots_;
const std::set<void*>& remembered_slots_;
std::set<void*> initial_remembered_slots_;
};
class SimpleGCedBase : public GarbageCollected<SimpleGCedBase> {
public:
static size_t destructed_objects;
virtual ~SimpleGCedBase() { ++destructed_objects; }
virtual void Trace(Visitor* v) const { v->Trace(next); }
Member<SimpleGCedBase> next;
};
size_t SimpleGCedBase::destructed_objects;
template <size_t Size>
class SimpleGCed : public SimpleGCedBase {
char array[Size];
};
using Small = SimpleGCed<64>;
using Large = SimpleGCed<kLargeObjectSizeThreshold * 2>;
template <typename Type>
struct OtherType;
template <>
struct OtherType<Small> {
using Type = Large;
};
template <>
struct OtherType<Large> {
using Type = Small;
};
class MinorGCTest : public testing::TestWithHeap {
public:
MinorGCTest() {
CollectMajor();
SimpleGCedBase::destructed_objects = 0;
}
static size_t DestructedObjects() {
return SimpleGCedBase::destructed_objects;
}
void CollectMinor() {
Heap::From(GetHeap())->CollectGarbage(
Heap::Config::MinorPreciseAtomicConfig());
}
void CollectMajor() {
Heap::From(GetHeap())->CollectGarbage(Heap::Config::PreciseAtomicConfig());
}
};
template <typename SmallOrLarge>
class MinorGCTestForType : public MinorGCTest {
public:
using Type = SmallOrLarge;
};
template <typename... Args>
void RunMinorGCAndExpectObjectsPromoted(MinorGCTest& test, Args*... args) {
([args] { EXPECT_TRUE(IsHeapObjectYoung(args)); }(), ...);
test.CollectMinor();
([args] { EXPECT_TRUE(IsHeapObjectOld(args)); }(), ...);
}
} // namespace
using ObjectTypes = ::testing::Types<Small, Large>;
TYPED_TEST_SUITE(MinorGCTestForType, ObjectTypes);
TYPED_TEST(MinorGCTestForType, MinorCollection) {
using Type = typename TestFixture::Type;
......@@ -256,7 +267,7 @@ void InterGenerationalPointerTest(MinorGCTest* test, cppgc::Heap* heap) {
}
}
const auto& set = Heap::From(heap)->remembered_slots();
const auto& set = test->RememberedSlots();
auto set_size_before = set.size();
// Issue generational barrier.
......@@ -321,12 +332,12 @@ TYPED_TEST(MinorGCTestForType, OmitGenerationalBarrierForSentinels) {
EXPECT_FALSE(HeapObjectHeader::FromObject(old.Get()).IsYoung());
{
ExpectNoRememberedSlotsAdded _(this->GetHeap());
ExpectNoRememberedSlotsAdded _(*this);
// Try issuing generational barrier for nullptr.
old->next = static_cast<Type*>(nullptr);
}
{
ExpectNoRememberedSlotsAdded _(this->GetHeap());
ExpectNoRememberedSlotsAdded _(*this);
// Try issuing generational barrier for sentinel.
old->next = static_cast<Type*>(kSentinelPointer);
}
......@@ -341,15 +352,13 @@ void TestRememberedSetInvalidation(MinorGCTest& test) {
auto* young = MakeGarbageCollected<To>(test.GetAllocationHandle());
{
ExpectRememberedSlotsAdded _(test.GetHeap(),
{old->next.GetSlotForTesting()});
ExpectRememberedSlotsAdded _(test, {old->next.GetSlotForTesting()});
// Issue the generational barrier.
old->next = young;
}
{
ExpectRememberedSlotsRemoved _(test.GetHeap(),
{old->next.GetSlotForTesting()});
ExpectRememberedSlotsRemoved _(test, {old->next.GetSlotForTesting()});
// Release the persistent and free the old object.
auto* old_raw = old.Release();
subtle::FreeUnreferencedObject(test.GetHeapHandle(), *old_raw);
......@@ -388,7 +397,7 @@ TEST_F(MinorGCTest, RememberedSetInvalidationOnShrink) {
auto* young = MakeGarbageCollected<Small>(GetAllocationHandle());
const auto& set = Heap::From(GetHeap())->remembered_slots();
const auto& set = RememberedSlots();
const size_t set_size_before_barrier = set.size();
// Issue the generational barriers.
......@@ -500,10 +509,9 @@ TYPED_TEST(MinorGCTestForType, GenerationalBarrierDeferredTracing) {
EXPECT_TRUE(IsHeapObjectOld(array.Get()));
const auto& remembered_objects =
Heap::From(this->GetHeap())->remembered_source_objects();
const auto& remembered_objects = this->RememberedSourceObjects();
{
ExpectNoRememberedSlotsAdded _(this->GetHeap());
ExpectNoRememberedSlotsAdded _(*this);
EXPECT_EQ(0u, remembered_objects.count(
&HeapObjectHeader::FromObject(array->objects)));
......