Commit 1924e5b9 authored by Michael Lippautz, committed by V8 LUCI CQ

cppgc: Returns BasePage::space() as reference

Also change:
- {NormalPageSpace, LargePageSpace}::From()
- ObjectAllocator::*

Bug: v8:11822
Change-Id: I78a1a5379e16fc1e1c95136d7aa8cc34caed0413
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2917042
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74784}
parent c32ba7e0
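In short: a page always belongs to a space, so accessors that previously returned pointers now return references, and the corresponding From() helpers and ObjectAllocator entry points take references. A minimal caller-side sketch of the old versus new pattern (illustrative only, not part of the diff; it assumes the V8-internal include paths and a const-qualified index() accessor):

#include "src/heap/cppgc/heap-page.h"   // BasePage (assumed include path)
#include "src/heap/cppgc/heap-space.h"  // BaseSpace, NormalPageSpace (assumed include path)

namespace cppgc {
namespace internal {

// Before this change, the equivalent lookup chained through pointers:
//   page.space()->index();
// After it, space() hands out a reference, so callers use '.' and need no null checks:
size_t SpaceIndexOf(const BasePage& page) {
  const BaseSpace& space = page.space();  // reference: a page always has a space
  return space.index();
}

}  // namespace internal
}  // namespace cppgc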
@@ -103,7 +103,7 @@ void MovableReferences::AddOrFilter(MovableReference* slot) {
// The following cases are not compacted and do not require recording:
// - Compactable object on large pages.
// - Compactable object on non-compactable spaces.
- if (value_page->is_large() || !value_page->space()->is_compactable()) return;
+ if (value_page->is_large() || !value_page->space().is_compactable()) return;
// Slots must reside in and values must point to live objects at this
// point. |value| usually points to a separate object but can also point
@@ -124,7 +124,7 @@ void MovableReferences::AddOrFilter(MovableReference* slot) {
movable_references_.emplace(value, slot);
// Check whether the slot itself resides on a page that is compacted.
- if (V8_LIKELY(!slot_page->space()->is_compactable())) return;
+ if (V8_LIKELY(!slot_page->space().is_compactable())) return;
CHECK_EQ(interior_movable_references_.end(),
interior_movable_references_.find(slot));
@@ -227,7 +227,7 @@ class CompactionState final {
: space_(space), movable_references_(movable_references) {}
void AddPage(NormalPage* page) {
- DCHECK_EQ(space_, page->space());
+ DCHECK_EQ(space_, &page->space());
// If not the first page, add |page| onto the available pages chain.
if (!current_page_)
current_page_ = page;
@@ -296,7 +296,7 @@
private:
void ReturnCurrentPageToSpace() {
- DCHECK_EQ(space_, current_page_->space());
+ DCHECK_EQ(space_, &current_page_->space());
space_->AddPage(current_page_);
if (used_bytes_in_current_page_ != current_page_->PayloadSize()) {
// Put the remainder of the page onto the free list.
@@ -38,14 +38,14 @@ void FreeUnreferencedObject(HeapHandle& heap_handle, void* object) {
// BasePage is okay for regular and large objects.
BasePage* base_page = BasePage::FromPayload(object);
if (base_page->is_large()) { // Large object.
- base_page->space()->RemovePage(base_page);
+ base_page->space().RemovePage(base_page);
base_page->heap().stats_collector()->NotifyExplicitFree(
LargePage::From(base_page)->PayloadSize());
LargePage::Destroy(LargePage::From(base_page));
} else { // Regular object.
const size_t header_size = header.AllocatedSize();
auto* normal_page = NormalPage::From(base_page);
- auto& normal_space = *static_cast<NormalPageSpace*>(base_page->space());
+ auto& normal_space = *static_cast<NormalPageSpace*>(&base_page->space());
auto& lab = normal_space.linear_allocation_buffer();
ConstAddress payload_end = header.ObjectEnd();
SetMemoryInaccessible(&header, header_size);
@@ -69,7 +69,7 @@ bool Grow(HeapObjectHeader& header, BasePage& base_page, size_t new_size,
DCHECK_GE(size_delta, kAllocationGranularity);
DCHECK(!base_page.is_large());
- auto& normal_space = *static_cast<NormalPageSpace*>(base_page.space());
+ auto& normal_space = *static_cast<NormalPageSpace*>(&base_page.space());
auto& lab = normal_space.linear_allocation_buffer();
if (lab.start() == header.ObjectEnd() && lab.size() >= size_delta) {
// LABs are considered used memory which means that no allocated size
@@ -88,7 +88,7 @@ bool Shrink(HeapObjectHeader& header, BasePage& base_page, size_t new_size,
DCHECK_GE(size_delta, kAllocationGranularity);
DCHECK(!base_page.is_large());
- auto& normal_space = *static_cast<NormalPageSpace*>(base_page.space());
+ auto& normal_space = *static_cast<NormalPageSpace*>(&base_page.space());
auto& lab = normal_space.linear_allocation_buffer();
Address free_start = header.ObjectEnd() - size_delta;
if (lab.start() == header.ObjectEnd()) {
@@ -92,7 +92,7 @@ const HeapObjectHeader* BasePage::TryObjectHeaderFromInnerAddress(
return nullptr;
// Check that the space has no linear allocation buffer.
DCHECK(!NormalPageSpace::From(normal_page->space())
- ->linear_allocation_buffer()
+ .linear_allocation_buffer()
.size());
}
@@ -124,13 +124,12 @@ NormalPage* NormalPage::Create(PageBackend& page_backend,
// static
void NormalPage::Destroy(NormalPage* page) {
DCHECK(page);
- BaseSpace* space = page->space();
- DCHECK_EQ(space->end(), std::find(space->begin(), space->end(), page));
+ const BaseSpace& space = page->space();
+ DCHECK_EQ(space.end(), std::find(space.begin(), space.end(), page));
page->~NormalPage();
PageBackend* backend = page->heap().page_backend();
page->heap().stats_collector()->NotifyFreedMemory(kPageSize);
- backend->FreeNormalPageMemory(space->index(),
- reinterpret_cast<Address>(page));
+ backend->FreeNormalPageMemory(space.index(), reinterpret_cast<Address>(page));
}
NormalPage::NormalPage(HeapBase& heap, BaseSpace& space)
@@ -143,13 +142,13 @@ NormalPage::NormalPage(HeapBase& heap, BaseSpace& space)
NormalPage::~NormalPage() = default;
NormalPage::iterator NormalPage::begin() {
- const auto& lab = NormalPageSpace::From(space())->linear_allocation_buffer();
+ const auto& lab = NormalPageSpace::From(space()).linear_allocation_buffer();
return iterator(reinterpret_cast<HeapObjectHeader*>(PayloadStart()),
lab.start(), lab.size());
}
NormalPage::const_iterator NormalPage::begin() const {
- const auto& lab = NormalPageSpace::From(space())->linear_allocation_buffer();
+ const auto& lab = NormalPageSpace::From(space()).linear_allocation_buffer();
return const_iterator(
reinterpret_cast<const HeapObjectHeader*>(PayloadStart()), lab.start(),
lab.size());
@@ -208,8 +207,8 @@ LargePage* LargePage::Create(PageBackend& page_backend, LargePageSpace& space,
void LargePage::Destroy(LargePage* page) {
DCHECK(page);
#if DEBUG
- BaseSpace* space = page->space();
- DCHECK_EQ(space->end(), std::find(space->begin(), space->end(), page));
+ const BaseSpace& space = page->space();
+ DCHECK_EQ(space.end(), std::find(space.begin(), space.end(), page));
#endif
page->~LargePage();
PageBackend* backend = page->heap().page_backend();
@@ -35,8 +35,7 @@ class V8_EXPORT_PRIVATE BasePage {
HeapBase& heap() const { return heap_; }
- BaseSpace* space() { return &space_; }
- const BaseSpace* space() const { return &space_; }
+ BaseSpace& space() const { return space_; }
bool is_large() const { return type_ == PageType::kLarge; }
@@ -88,12 +88,12 @@ class V8_EXPORT_PRIVATE NormalPageSpace final : public BaseSpace {
size_t size_ = 0;
};
- static NormalPageSpace* From(BaseSpace* space) {
- DCHECK(!space->is_large());
- return static_cast<NormalPageSpace*>(space);
+ static NormalPageSpace& From(BaseSpace& space) {
+ DCHECK(!space.is_large());
+ return static_cast<NormalPageSpace&>(space);
}
- static const NormalPageSpace* From(const BaseSpace* space) {
- return From(const_cast<BaseSpace*>(space));
+ static const NormalPageSpace& From(const BaseSpace& space) {
+ return From(const_cast<BaseSpace&>(space));
}
NormalPageSpace(RawHeap* heap, size_t index, bool is_compactable);
@@ -113,12 +113,12 @@ class V8_EXPORT_PRIVATE NormalPageSpace final : public BaseSpace {
class V8_EXPORT_PRIVATE LargePageSpace final : public BaseSpace {
public:
- static LargePageSpace* From(BaseSpace* space) {
- DCHECK(space->is_large());
- return static_cast<LargePageSpace*>(space);
+ static LargePageSpace& From(BaseSpace& space) {
+ DCHECK(space.is_large());
+ return static_cast<LargePageSpace&>(space);
}
- static const LargePageSpace* From(const BaseSpace* space) {
- return From(const_cast<BaseSpace*>(space));
+ static const LargePageSpace& From(const BaseSpace& space) {
+ return From(const_cast<BaseSpace&>(space));
}
LargePageSpace(RawHeap* heap, size_t index);
@@ -29,8 +29,8 @@ class HeapVisitor {
void Traverse(BaseSpace* space) {
const bool is_stopped =
space->is_large()
- ? VisitLargePageSpaceImpl(LargePageSpace::From(space))
- : VisitNormalPageSpaceImpl(NormalPageSpace::From(space));
+ ? VisitLargePageSpaceImpl(&LargePageSpace::From(*space))
+ : VisitNormalPageSpaceImpl(&NormalPageSpace::From(*space));
if (is_stopped) return;
for (auto* page : *space) {
Traverse(page);
@@ -53,30 +53,26 @@ void MarkRangeAsYoung(BasePage* page, Address begin, Address end) {
#endif
}
- void AddToFreeList(NormalPageSpace* space, Address start, size_t size) {
- auto& free_list = space->free_list();
- free_list.Add({start, size});
+ void AddToFreeList(NormalPageSpace& space, Address start, size_t size) {
+ space.free_list().Add({start, size});
NormalPage::From(BasePage::FromPayload(start))
->object_start_bitmap()
.SetBit(start);
}
- void ReplaceLinearAllocationBuffer(NormalPageSpace* space,
- StatsCollector* stats_collector,
+ void ReplaceLinearAllocationBuffer(NormalPageSpace& space,
+ StatsCollector& stats_collector,
Address new_buffer, size_t new_size) {
- DCHECK_NOT_NULL(space);
- DCHECK_NOT_NULL(stats_collector);
- auto& lab = space->linear_allocation_buffer();
+ auto& lab = space.linear_allocation_buffer();
if (lab.size()) {
AddToFreeList(space, lab.start(), lab.size());
- stats_collector->NotifyExplicitFree(lab.size());
+ stats_collector.NotifyExplicitFree(lab.size());
}
lab.Set(new_buffer, new_size);
if (new_size) {
DCHECK_NOT_NULL(new_buffer);
- stats_collector->NotifyAllocation(new_size);
+ stats_collector.NotifyAllocation(new_size);
auto* page = NormalPage::From(BasePage::FromPayload(new_buffer));
page->object_start_bitmap().ClearBit(new_buffer);
MarkRangeAsYoung(page, new_buffer, new_buffer + new_size);
@@ -108,7 +104,7 @@ ObjectAllocator::ObjectAllocator(RawHeap* heap, PageBackend* page_backend,
page_backend_(page_backend),
stats_collector_(stats_collector) {}
- void* ObjectAllocator::OutOfLineAllocate(NormalPageSpace* space, size_t size,
+ void* ObjectAllocator::OutOfLineAllocate(NormalPageSpace& space, size_t size,
GCInfoIndex gcinfo) {
void* memory = OutOfLineAllocateImpl(space, size, gcinfo);
stats_collector_->NotifySafePointForConservativeCollection();
@@ -116,7 +112,7 @@ void* ObjectAllocator::OutOfLineAllocate(NormalPageSpace* space, size_t size,
return memory;
}
- void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace* space,
+ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space,
size_t size, GCInfoIndex gcinfo) {
DCHECK_EQ(0, size & kAllocationMask);
DCHECK_LE(kFreeListEntrySize, size);
@@ -125,8 +121,8 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace* space,
// 1. If this allocation is big enough, allocate a large object.
if (size >= kLargeObjectSizeThreshold) {
- auto* large_space = LargePageSpace::From(
- raw_heap_->Space(RawHeap::RegularSpaceType::kLarge));
+ auto* large_space = &LargePageSpace::From(
+ *raw_heap_->Space(RawHeap::RegularSpaceType::kLarge));
return AllocateLargeObject(page_backend_, large_space, stats_collector_,
size, gcinfo);
}
@@ -142,7 +138,7 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace* space,
// TODO(chromium:1056170): Investigate whether this should be a loop which
// would result in more agressive re-use of memory at the expense of
// potentially larger allocation time.
- if (sweeper.SweepForAllocationIfRunning(space, size)) {
+ if (sweeper.SweepForAllocationIfRunning(&space, size)) {
// Sweeper found a block of at least `size` bytes. Allocation from the free
// list may still fail as actual buckets are not exhaustively searched for
// a suitable block. Instead, buckets are tested from larger sizes that are
@@ -160,11 +156,11 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace* space,
// TODO(chromium:1056170): Make use of the synchronously freed memory.
// 5. Add a new page to this heap.
- auto* new_page = NormalPage::Create(*page_backend_, *space);
- space->AddPage(new_page);
+ auto* new_page = NormalPage::Create(*page_backend_, space);
+ space.AddPage(new_page);
// 6. Set linear allocation buffer to new page.
- ReplaceLinearAllocationBuffer(space, stats_collector_,
+ ReplaceLinearAllocationBuffer(space, *stats_collector_,
new_page->PayloadStart(),
new_page->PayloadSize());
@@ -175,13 +171,14 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace* space,
return result;
}
- void* ObjectAllocator::AllocateFromFreeList(NormalPageSpace* space, size_t size,
+ void* ObjectAllocator::AllocateFromFreeList(NormalPageSpace& space, size_t size,
GCInfoIndex gcinfo) {
- const FreeList::Block entry = space->free_list().Allocate(size);
+ const FreeList::Block entry = space.free_list().Allocate(size);
if (!entry.address) return nullptr;
- ReplaceLinearAllocationBuffer(
- space, stats_collector_, static_cast<Address>(entry.address), entry.size);
+ ReplaceLinearAllocationBuffer(space, *stats_collector_,
+ static_cast<Address>(entry.address),
+ entry.size);
return AllocateObjectOnSpace(space, size, gcinfo);
}
@@ -194,7 +191,7 @@ void ObjectAllocator::ResetLinearAllocationBuffers() {
bool VisitLargePageSpace(LargePageSpace*) { return true; }
bool VisitNormalPageSpace(NormalPageSpace* space) {
- ReplaceLinearAllocationBuffer(space, stats_collector_, nullptr, 0);
+ ReplaceLinearAllocationBuffer(*space, *stats_collector_, nullptr, 0);
return true;
}
@@ -57,11 +57,11 @@ class V8_EXPORT_PRIVATE ObjectAllocator final : public cppgc::AllocationHandle {
inline static RawHeap::RegularSpaceType GetInitialSpaceIndexForSize(
size_t size);
- inline void* AllocateObjectOnSpace(NormalPageSpace* space, size_t size,
+ inline void* AllocateObjectOnSpace(NormalPageSpace& space, size_t size,
GCInfoIndex gcinfo);
- void* OutOfLineAllocate(NormalPageSpace*, size_t, GCInfoIndex);
- void* OutOfLineAllocateImpl(NormalPageSpace*, size_t, GCInfoIndex);
- void* AllocateFromFreeList(NormalPageSpace*, size_t, GCInfoIndex);
+ void* OutOfLineAllocate(NormalPageSpace&, size_t, GCInfoIndex);
+ void* OutOfLineAllocateImpl(NormalPageSpace&, size_t, GCInfoIndex);
+ void* AllocateFromFreeList(NormalPageSpace&, size_t, GCInfoIndex);
RawHeap* raw_heap_;
PageBackend* page_backend_;
@@ -74,7 +74,7 @@ void* ObjectAllocator::AllocateObject(size_t size, GCInfoIndex gcinfo) {
RoundUp<kAllocationGranularity>(size + sizeof(HeapObjectHeader));
const RawHeap::RegularSpaceType type =
GetInitialSpaceIndexForSize(allocation_size);
- return AllocateObjectOnSpace(NormalPageSpace::From(raw_heap_->Space(type)),
+ return AllocateObjectOnSpace(NormalPageSpace::From(*raw_heap_->Space(type)),
allocation_size, gcinfo);
}
@@ -84,7 +84,7 @@ void* ObjectAllocator::AllocateObject(size_t size, GCInfoIndex gcinfo,
const size_t allocation_size =
RoundUp<kAllocationGranularity>(size + sizeof(HeapObjectHeader));
return AllocateObjectOnSpace(
- NormalPageSpace::From(raw_heap_->CustomSpace(space_index)),
+ NormalPageSpace::From(*raw_heap_->CustomSpace(space_index)),
allocation_size, gcinfo);
}
@@ -101,12 +101,12 @@ RawHeap::RegularSpaceType ObjectAllocator::GetInitialSpaceIndexForSize(
return RawHeap::RegularSpaceType::kNormal4;
}
- void* ObjectAllocator::AllocateObjectOnSpace(NormalPageSpace* space,
+ void* ObjectAllocator::AllocateObjectOnSpace(NormalPageSpace& space,
size_t size, GCInfoIndex gcinfo) {
DCHECK_LT(0u, gcinfo);
NormalPageSpace::LinearAllocationBuffer& current_lab =
- space->linear_allocation_buffer();
+ space.linear_allocation_buffer();
if (current_lab.size() < size) {
return OutOfLineAllocate(space, size, gcinfo);
}
@@ -137,8 +137,7 @@ class InlinedFinalizationBuilder final {
}
void AddFreeListEntry(Address start, size_t size) {
- auto* space = NormalPageSpace::From(page_->space());
- space->free_list().Add({start, size});
+ NormalPageSpace::From(page_->space()).free_list().Add({start, size});
}
ResultType GetResult(bool is_empty, size_t largest_new_free_list_entry) {
@@ -304,8 +303,7 @@ class SweepFinalizer final {
DCHECK(!page->is_large());
// Merge freelists without finalizers.
- FreeList& space_freelist =
- NormalPageSpace::From(page->space())->free_list();
+ FreeList& space_freelist = NormalPageSpace::From(page->space()).free_list();
space_freelist.Append(std::move(page_state->cached_free_list));
// Merge freelist with finalizers.
@@ -317,7 +315,7 @@
page_state->largest_new_free_list_entry, largest_new_free_list_entry_);
// Add the page to the space.
- page->space()->AddPage(page);
+ page->space().AddPage(page);
}
size_t largest_new_free_list_entry() const {
@@ -397,7 +395,7 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> {
if (result.is_empty) {
NormalPage::Destroy(page);
} else {
- page->space()->AddPage(page);
+ page->space().AddPage(page);
largest_new_free_list_entry_ = std::max(
result.largest_new_free_list_entry, largest_new_free_list_entry_);
}
@@ -408,7 +406,7 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> {
HeapObjectHeader* header = page->ObjectHeader();
if (header->IsMarked()) {
StickyUnmark(header);
- page->space()->AddPage(page);
+ page->space().AddPage(page);
} else {
header->Finalize();
LargePage::Destroy(page);
@@ -450,7 +448,7 @@ class ConcurrentSweepTask final : public cppgc::JobTask,
bool VisitNormalPage(NormalPage* page) {
SpaceState::SweptPageState sweep_result =
SweepNormalPage<DeferredFinalizationBuilder>(page);
- const size_t space_index = page->space()->index();
+ const size_t space_index = page->space().index();
DCHECK_GT(states_->size(), space_index);
SpaceState& space_state = (*states_)[space_index];
space_state.swept_unfinalized_pages.Push(std::move(sweep_result));
@@ -461,14 +459,14 @@
HeapObjectHeader* header = page->ObjectHeader();
if (header->IsMarked()) {
StickyUnmark(header);
- page->space()->AddPage(page);
+ page->space().AddPage(page);
return true;
}
if (!header->IsFinalizable()) {
LargePage::Destroy(page);
return true;
}
- const size_t space_index = page->space()->index();
+ const size_t space_index = page->space().index();
DCHECK_GT(states_->size(), space_index);
SpaceState& state = (*states_)[space_index];
state.swept_unfinalized_pages.Push(
@@ -122,11 +122,11 @@ class ConcurrentSweeperTest : public testing::TestWithHeap {
EXPECT_EQ(nullptr, backend->Lookup(reinterpret_cast<ConstAddress>(page)));
}
- bool FreeListContains(const BaseSpace* space,
+ bool FreeListContains(const BaseSpace& space,
const std::vector<void*>& objects) {
const Heap* heap = Heap::From(GetHeap());
const PageBackend* backend = heap->page_backend();
- const auto& freelist = NormalPageSpace::From(space)->free_list();
+ const auto& freelist = NormalPageSpace::From(space).free_list();
for (void* object : objects) {
// The corresponding page could be removed.
@@ -148,7 +148,7 @@ TEST_F(ConcurrentSweeperTest, BackgroundSweepOfNormalPage) {
HeapObjectHeader::FromObject(marked_object).TryMarkAtomic();
auto* page = BasePage::FromPayload(unmarked_object);
- auto* space = page->space();
+ auto& space = page->space();
// The test requires objects to be allocated on the same page;
ASSERT_EQ(page, BasePage::FromPayload(marked_object));
@@ -188,9 +188,9 @@ TEST_F(ConcurrentSweeperTest, BackgroundSweepOfLargePage) {
auto* unmarked_page = BasePage::FromPayload(unmarked_object);
auto* marked_page = BasePage::FromPayload(marked_object);
- auto* space = unmarked_page->space();
+ auto& space = unmarked_page->space();
- ASSERT_EQ(space, marked_page->space());
+ ASSERT_EQ(&space, &marked_page->space());
StartSweeping();
@@ -210,7 +210,7 @@ TEST_F(ConcurrentSweeperTest, BackgroundSweepOfLargePage) {
CheckPageRemoved(unmarked_page);
// Check that marked pages are returned to space right away.
- EXPECT_NE(space->end(), std::find(space->begin(), space->end(), marked_page));
+ EXPECT_NE(space.end(), std::find(space.begin(), space.end(), marked_page));
FinishSweeping();
}
@@ -229,7 +229,7 @@ TEST_F(ConcurrentSweeperTest, DeferredFinalizationOfNormalPage) {
objects.push_back(object);
auto* page = BasePage::FromPayload(object);
pages.insert(page);
- if (!space) space = page->space();
+ if (!space) space = &page->space();
}
StartSweeping();
@@ -242,7 +242,7 @@ TEST_F(ConcurrentSweeperTest, DeferredFinalizationOfNormalPage) {
EXPECT_EQ(space->end(), std::find(space->begin(), space->end(), page));
}
// Check that finalizable objects are left intact in pages.
- EXPECT_FALSE(FreeListContains(space, objects));
+ EXPECT_FALSE(FreeListContains(*space, objects));
// No finalizers have been executed.
EXPECT_EQ(0u, g_destructor_callcount);
@@ -251,7 +251,7 @@
// Check that finalizable objects are swept and turned into freelist entries.
CheckFreeListEntries(objects);
// Check that space's freelist contains these entries.
- EXPECT_TRUE(FreeListContains(space, objects));
+ EXPECT_TRUE(FreeListContains(*space, objects));
// Check that finalizers have been executed.
EXPECT_EQ(kNumberOfObjects, g_destructor_callcount);
}
@@ -262,7 +262,7 @@ TEST_F(ConcurrentSweeperTest, DeferredFinalizationOfLargePage) {
auto* object = MakeGarbageCollected<GCedType>(GetAllocationHandle());
auto* page = BasePage::FromPayload(object);
- auto* space = page->space();
+ auto& space = page->space();
StartSweeping();
@@ -270,7 +270,7 @@
WaitForConcurrentSweeping();
// Check that the page is not returned to the space.
- EXPECT_EQ(space->end(), std::find(space->begin(), space->end(), page));
+ EXPECT_EQ(space.end(), std::find(space.begin(), space.end(), page));
// Check that no destructors have been executed yet.
EXPECT_EQ(0u, g_destructor_callcount);
@@ -107,11 +107,11 @@ TEST_F(TestWithHeapWithCustomSpaces, AllocateOnCustomSpaces) {
auto* custom2 =
MakeGarbageCollected<CustomGCed2>(GetHeap()->GetAllocationHandle());
EXPECT_EQ(RawHeap::kNumberOfRegularSpaces,
- NormalPage::FromPayload(custom1)->space()->index());
+ NormalPage::FromPayload(custom1)->space().index());
EXPECT_EQ(RawHeap::kNumberOfRegularSpaces + 1,
- NormalPage::FromPayload(custom2)->space()->index());
+ NormalPage::FromPayload(custom2)->space().index());
EXPECT_EQ(static_cast<size_t>(RawHeap::RegularSpaceType::kNormal1),
- NormalPage::FromPayload(regular)->space()->index());
+ NormalPage::FromPayload(regular)->space().index());
}
TEST_F(TestWithHeapWithCustomSpaces, DifferentSpacesUsesDifferentPages) {
@@ -135,11 +135,11 @@ TEST_F(TestWithHeapWithCustomSpaces,
auto* custom2 =
MakeGarbageCollected<CustomGCedFinal2>(GetHeap()->GetAllocationHandle());
EXPECT_EQ(RawHeap::kNumberOfRegularSpaces,
- NormalPage::FromPayload(custom1)->space()->index());
+ NormalPage::FromPayload(custom1)->space().index());
EXPECT_EQ(RawHeap::kNumberOfRegularSpaces,
- NormalPage::FromPayload(custom2)->space()->index());
+ NormalPage::FromPayload(custom2)->space().index());
EXPECT_EQ(static_cast<size_t>(RawHeap::RegularSpaceType::kNormal1),
- NormalPage::FromPayload(regular)->space()->index());
+ NormalPage::FromPayload(regular)->space().index());
}
TEST_F(TestWithHeapWithCustomSpaces, SweepCustomSpace) {
@@ -246,11 +246,11 @@ TEST_F(TestWithHeapWithCompactableCustomSpaces,
GetHeap()->GetAllocationHandle());
auto* default_compactable = MakeGarbageCollected<DefaultCompactableGCed>(
GetHeap()->GetAllocationHandle());
- EXPECT_TRUE(NormalPage::FromPayload(compactable)->space()->is_compactable());
+ EXPECT_TRUE(NormalPage::FromPayload(compactable)->space().is_compactable());
EXPECT_FALSE(
- NormalPage::FromPayload(not_compactable)->space()->is_compactable());
+ NormalPage::FromPayload(not_compactable)->space().is_compactable());
EXPECT_FALSE(
- NormalPage::FromPayload(default_compactable)->space()->is_compactable());
+ NormalPage::FromPayload(default_compactable)->space().is_compactable());
}
} // namespace internal
@@ -49,8 +49,8 @@ class DynamicallySized final : public GarbageCollected<DynamicallySized> {
TEST_F(ExplicitManagementTest, FreeRegularObjectToLAB) {
auto* o =
MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
- const auto* space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
- const auto& lab = space->linear_allocation_buffer();
+ const auto& space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
+ const auto& lab = space.linear_allocation_buffer();
auto& header = HeapObjectHeader::FromObject(o);
const size_t size = header.AllocatedSize();
Address needle = reinterpret_cast<Address>(&header);
@@ -63,14 +63,14 @@ TEST_F(ExplicitManagementTest, FreeRegularObjectToLAB) {
EXPECT_EQ(lab_size_before_free + size, lab.size());
// LAB is included in allocated object size, so no change is expected.
EXPECT_EQ(allocated_size_before, AllocatedObjectSize());
- EXPECT_FALSE(space->free_list().ContainsForTesting({needle, size}));
+ EXPECT_FALSE(space.free_list().ContainsForTesting({needle, size}));
}
TEST_F(ExplicitManagementTest, FreeRegularObjectToFreeList) {
auto* o =
MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
- const auto* space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
- const auto& lab = space->linear_allocation_buffer();
+ const auto& space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
+ const auto& lab = space.linear_allocation_buffer();
auto& header = HeapObjectHeader::FromObject(o);
const size_t size = header.AllocatedSize();
Address needle = reinterpret_cast<Address>(&header);
@@ -81,7 +81,7 @@ TEST_F(ExplicitManagementTest, FreeRegularObjectToFreeList) {
subtle::FreeUnreferencedObject(GetHeapHandle(), *o);
EXPECT_EQ(lab.start(), nullptr);
EXPECT_EQ(allocated_size_before - size, AllocatedObjectSize());
- EXPECT_TRUE(space->free_list().ContainsForTesting({needle, size}));
+ EXPECT_TRUE(space.free_list().ContainsForTesting({needle, size}));
}
TEST_F(ExplicitManagementTest, FreeLargeObject) {
@@ -157,7 +157,7 @@ TEST_F(ExplicitManagementTest, ShrinkFreeList) {
auto* o = MakeGarbageCollected<DynamicallySized>(
GetHeap()->GetAllocationHandle(),
AdditionalBytes(ObjectAllocator::kSmallestSpaceSize));
- const auto* space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
+ const auto& space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
// Force returning to free list by removing the LAB.
ResetLinearAllocationBuffers();
auto& header = HeapObjectHeader::FromObject(o);
@@ -165,7 +165,7 @@
constexpr size_t size_of_o = sizeof(DynamicallySized);
EXPECT_TRUE(subtle::Resize(*o, AdditionalBytes(0)));
EXPECT_EQ(RoundUp<kAllocationGranularity>(size_of_o), header.ObjectSize());
- EXPECT_TRUE(space->free_list().ContainsForTesting(
+ EXPECT_TRUE(space.free_list().ContainsForTesting(
{header.ObjectEnd(), ObjectAllocator::kSmallestSpaceSize}));
}
@@ -173,7 +173,7 @@ TEST_F(ExplicitManagementTest, ShrinkFreeListBailoutAvoidFragmentation) {
auto* o = MakeGarbageCollected<DynamicallySized>(
GetHeap()->GetAllocationHandle(),
AdditionalBytes(ObjectAllocator::kSmallestSpaceSize - 1));
- const auto* space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
+ const auto& space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
// Force returning to free list by removing the LAB.
ResetLinearAllocationBuffers();
auto& header = HeapObjectHeader::FromObject(o);
@@ -183,7 +183,7 @@
EXPECT_EQ(RoundUp<kAllocationGranularity>(
size_of_o + ObjectAllocator::kSmallestSpaceSize - 1),
header.ObjectSize());
- EXPECT_FALSE(space->free_list().ContainsForTesting(
+ EXPECT_FALSE(space.free_list().ContainsForTesting(
{header.ObjectStart() + RoundUp<kAllocationGranularity>(size_of_o),
ObjectAllocator::kSmallestSpaceSize - 1}));
}
@@ -54,39 +54,39 @@ TEST_F(PageTest, PredefinedSpaces) {
RawHeap& heap = GetRawHeap();
{
auto* gced = MakeGarbageCollected<GCed<1>>(GetAllocationHandle());
- BaseSpace* space = NormalPage::FromPayload(gced)->space();
- EXPECT_EQ(heap.Space(SpaceType::kNormal1), space);
- EXPECT_EQ(0u, space->index());
- EXPECT_FALSE(space->is_large());
+ BaseSpace& space = NormalPage::FromPayload(gced)->space();
+ EXPECT_EQ(heap.Space(SpaceType::kNormal1), &space);
+ EXPECT_EQ(0u, space.index());
+ EXPECT_FALSE(space.is_large());
}
{
auto* gced = MakeGarbageCollected<GCed<32>>(GetAllocationHandle());
- BaseSpace* space = NormalPage::FromPayload(gced)->space();
- EXPECT_EQ(heap.Space(SpaceType::kNormal2), space);
- EXPECT_EQ(1u, space->index());
- EXPECT_FALSE(space->is_large());
+ BaseSpace& space = NormalPage::FromPayload(gced)->space();
+ EXPECT_EQ(heap.Space(SpaceType::kNormal2), &space);
+ EXPECT_EQ(1u, space.index());
+ EXPECT_FALSE(space.is_large());
}
{
auto* gced = MakeGarbageCollected<GCed<64>>(GetAllocationHandle());
- BaseSpace* space = NormalPage::FromPayload(gced)->space();
- EXPECT_EQ(heap.Space(SpaceType::kNormal3), space);
- EXPECT_EQ(2u, space->index());
- EXPECT_FALSE(space->is_large());
+ BaseSpace& space = NormalPage::FromPayload(gced)->space();
+ EXPECT_EQ(heap.Space(SpaceType::kNormal3), &space);
+ EXPECT_EQ(2u, space.index());
+ EXPECT_FALSE(space.is_large());
}
{
auto* gced = MakeGarbageCollected<GCed<128>>(GetAllocationHandle());
- BaseSpace* space = NormalPage::FromPayload(gced)->space();
- EXPECT_EQ(heap.Space(SpaceType::kNormal4), space);
- EXPECT_EQ(3u, space->index());
- EXPECT_FALSE(space->is_large());
+ BaseSpace& space = NormalPage::FromPayload(gced)->space();
+ EXPECT_EQ(heap.Space(SpaceType::kNormal4), &space);
+ EXPECT_EQ(3u, space.index());
+ EXPECT_FALSE(space.is_large());
}
{
auto* gced = MakeGarbageCollected<GCed<2 * kLargeObjectSizeThreshold>>(
GetAllocationHandle());
- BaseSpace* space = NormalPage::FromPayload(gced)->space();
- EXPECT_EQ(heap.Space(SpaceType::kLarge), space);
- EXPECT_EQ(4u, space->index());
- EXPECT_TRUE(space->is_large());
+ BaseSpace& space = NormalPage::FromPayload(gced)->space();
+ EXPECT_EQ(heap.Space(SpaceType::kLarge), &space);
+ EXPECT_EQ(4u, space.index());
+ EXPECT_TRUE(space.is_large());
}
}
@@ -112,7 +112,7 @@ TEST_F(PageTest, NormalPageIndexing) {
size_t page_n = 0;
for (const BasePage* page : *space) {
EXPECT_FALSE(page->is_large());
- EXPECT_EQ(space, page->space());
+ EXPECT_EQ(space, &page->space());
++page_n;
}
EXPECT_EQ(page_n, space->size());
@@ -87,7 +87,7 @@ TEST_F(SweeperTest, DontSweepMarkedNormalObject) {
auto* object = MakeGarbageCollected<Type>(GetAllocationHandle());
MarkObject(object);
BasePage* page = BasePage::FromPayload(object);
- BaseSpace* space = page->space();
+ BaseSpace& space = page->space();
EXPECT_EQ(0u, g_destructor_callcount);
@@ -95,7 +95,7 @@ TEST_F(SweeperTest, DontSweepMarkedNormalObject) {
EXPECT_EQ(0u, g_destructor_callcount);
// Check that page is returned back to the space.
- EXPECT_NE(space->end(), std::find(space->begin(), space->end(), page));
+ EXPECT_NE(space.end(), std::find(space.begin(), space.end(), page));
EXPECT_NE(nullptr, GetBackend()->Lookup(reinterpret_cast<Address>(object)));
}
@@ -105,7 +105,7 @@ TEST_F(SweeperTest, SweepUnmarkedLargeObject) {
auto* object = MakeGarbageCollected<Type>(GetAllocationHandle());
BasePage* page = BasePage::FromPayload(object);
- BaseSpace* space = page->space();
+ BaseSpace& space = page->space();
EXPECT_EQ(0u, g_destructor_callcount);
@@ -113,7 +113,7 @@
EXPECT_EQ(1u, g_destructor_callcount);
// Check that page is gone.
- EXPECT_EQ(space->end(), std::find(space->begin(), space->end(), page));
+ EXPECT_EQ(space.end(), std::find(space.begin(), space.end(), page));
EXPECT_EQ(nullptr, GetBackend()->Lookup(reinterpret_cast<Address>(object)));
}
@@ -124,7 +124,7 @@ TEST_F(SweeperTest, DontSweepMarkedLargeObject) {
auto* object = MakeGarbageCollected<Type>(GetAllocationHandle());
MarkObject(object);
BasePage* page = BasePage::FromPayload(object);
- BaseSpace* space = page->space();
+ BaseSpace& space = page->space();
EXPECT_EQ(0u, g_destructor_callcount);
@@ -132,7 +132,7 @@
EXPECT_EQ(0u, g_destructor_callcount);
// Check that page is returned back to the space.
- EXPECT_NE(space->end(), std::find(space->begin(), space->end(), page));
+ EXPECT_NE(space.end(), std::find(space.begin(), space.end(), page));
EXPECT_NE(nullptr, GetBackend()->Lookup(reinterpret_cast<Address>(object)));
}
@@ -202,7 +202,7 @@ TEST_F(SweeperTest, CoalesceFreeListEntries) {
HeapObjectHeader::FromObject(object3).AllocatedSize();
const BasePage* page = BasePage::FromPayload(object2);
- const FreeList& freelist = NormalPageSpace::From(page->space())->free_list();
+ const FreeList& freelist = NormalPageSpace::From(page->space()).free_list();
const FreeList::Block coalesced_block = {
object2_start, static_cast<size_t>(object3_end - object2_start)};