Commit c036b6cd authored by Anton Bikineev, committed by Commit Bot

cppgc: Fix byte accounting for large pages and reset labs

This fixes two issues:
- LAB resetting didn't account the bytes as being freed;
- large objects were not accounted.

The CL introduces a single bottleneck for LAB resetting in
ObjectAllocator, which is aware of the StatsCollector. This way
NormalPageSpace is treated as a value object and all invariants
are maintained by ObjectAllocator (and Sweeper).

Bug: chromium:1056170
Change-Id: I027cc01fe5028a3dfa81905d7ea53dd12d1c1f20
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2237629
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#68286}
parent b5273050
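
The fix funnels every linear allocation buffer (LAB) change through a single helper, ReplaceLinearAllocationBuffer() in the object-allocator.cc hunk below: leftover LAB bytes go back to the free list and are reported to the StatsCollector as explicitly freed, while a newly acquired buffer is reported as allocated. A minimal standalone sketch of that accounting, using simplified stand-in types (Lab, FreeList and StatsCollector here are illustrative, not the real cppgc classes):

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Illustrative stand-ins for cppgc's LinearAllocationBuffer, FreeList and
// StatsCollector; names and members are simplified for this sketch.
struct Lab {
  std::uint8_t* start = nullptr;
  std::size_t size = 0;
};

struct FreeList {
  std::size_t bytes = 0;
  void Add(std::uint8_t* /*start*/, std::size_t size) { bytes += size; }
};

struct StatsCollector {
  std::size_t allocated = 0;
  std::size_t freed = 0;
  void NotifyAllocation(std::size_t size) { allocated += size; }
  void NotifyExplicitFree(std::size_t size) { freed += size; }
};

// Single bottleneck: any LAB change (replacement or reset-to-empty) returns
// the unused tail to the free list and keeps the byte accounting balanced.
void ReplaceLinearAllocationBuffer(Lab& lab, FreeList& free_list,
                                   StatsCollector& stats,
                                   std::uint8_t* new_buffer,
                                   std::size_t new_size) {
  if (lab.size) {
    free_list.Add(lab.start, lab.size);  // leftover LAB bytes become free...
    stats.NotifyExplicitFree(lab.size);  // ...and are accounted as freed
  }
  lab = {new_buffer, new_size};
  if (new_size) stats.NotifyAllocation(new_size);  // new buffer counts as allocated
}

int main() {
  static std::uint8_t page[4096];
  Lab lab;
  FreeList free_list;
  StatsCollector stats;

  // Acquire a 1 KiB LAB, then reset it while 256 bytes are still unused.
  ReplaceLinearAllocationBuffer(lab, free_list, stats, page, 1024);
  lab.start += 768;  // pretend 768 bytes were bump-allocated
  lab.size -= 768;
  ReplaceLinearAllocationBuffer(lab, free_list, stats, nullptr, 0);

  // Net live bytes = allocated - freed = 1024 - 256 = 768.
  std::printf("allocated=%zu freed=%zu live=%zu\n", stats.allocated,
              stats.freed, stats.allocated - stats.freed);
}

With both notifications issued at the same point, the collector's live-byte count stays consistent whether a LAB is replaced by a new block or reset to empty before marking and sweeping.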
@@ -135,14 +135,12 @@ BasePage::BasePage(Heap* heap, BaseSpace* space, PageType type)
 }
 
 // static
-NormalPage* NormalPage::Create(NormalPageSpace* space) {
-  DCHECK(space);
-  Heap* heap = space->raw_heap()->heap();
-  DCHECK(heap);
-  void* memory = heap->page_backend()->AllocateNormalPageMemory(space->index());
-  auto* normal_page = new (memory) NormalPage(heap, space);
-  space->AddPage(normal_page);
-  space->AddToFreeList(normal_page->PayloadStart(), normal_page->PayloadSize());
+NormalPage* NormalPage::Create(PageBackend* page_backend,
+                               NormalPageSpace* space) {
+  DCHECK_NOT_NULL(page_backend);
+  DCHECK_NOT_NULL(space);
+  void* memory = page_backend->AllocateNormalPageMemory(space->index());
+  auto* normal_page = new (memory) NormalPage(space->raw_heap()->heap(), space);
   return normal_page;
 }
@@ -207,17 +205,19 @@ LargePage::LargePage(Heap* heap, BaseSpace* space, size_t size)
 LargePage::~LargePage() = default;
 
 // static
-LargePage* LargePage::Create(LargePageSpace* space, size_t size) {
-  DCHECK(space);
+LargePage* LargePage::Create(PageBackend* page_backend, LargePageSpace* space,
+                             size_t size) {
+  DCHECK_NOT_NULL(page_backend);
+  DCHECK_NOT_NULL(space);
   DCHECK_LE(kLargeObjectSizeThreshold, size);
   const size_t page_header_size =
       RoundUp(sizeof(LargePage), kAllocationGranularity);
   const size_t allocation_size = page_header_size + size;
   Heap* heap = space->raw_heap()->heap();
-  void* memory = heap->page_backend()->AllocateLargePageMemory(allocation_size);
+  void* memory = page_backend->AllocateLargePageMemory(allocation_size);
   LargePage* page = new (memory) LargePage(heap, space, size);
-  space->AddPage(page);
   return page;
 }
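NormalPage::Create() and LargePage::Create() now only obtain memory from the PageBackend and construct the page; attaching the page to its space and seeding the free list becomes the caller's job (ObjectAllocator, and the unit tests further down). A rough sketch of that caller-side pattern, with simplified stand-in types (Page, Space and CreateDetachedPage are illustrative, not cppgc's API):

#include <cstddef>
#include <vector>

// Stand-ins: the real NormalPage/NormalPageSpace/PageBackend live in
// src/heap/cppgc; this only illustrates the "create detached, then attach" flow.
struct Page {
  std::byte* payload_start;
  std::size_t payload_size;
};

struct Space {
  std::vector<Page*> pages;
  std::size_t free_bytes = 0;
  void AddPage(Page* p) { pages.push_back(p); }
  void AddToFreeList(std::byte*, std::size_t size) { free_bytes += size; }
};

// After this CL, Create() only obtains backing memory and constructs the page
// header; it does not touch the space.
Page* CreateDetachedPage() {
  static std::byte backing[4096];
  static Page page{backing, sizeof(backing)};
  return &page;
}

int main() {
  Space space;
  Page* page = CreateDetachedPage();        // detached: the space is untouched
  space.AddPage(page);                      // caller links the page...
  space.AddToFreeList(page->payload_start,  // ...and seeds the free list,
                      page->payload_size);  // mirroring ObjectAllocator below
}

Keeping Create() free of space mutations is what lets ObjectAllocator (and the Sweeper) own the invariants and the byte accounting for both page kinds.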
@@ -111,8 +111,8 @@ class V8_EXPORT_PRIVATE NormalPage final : public BasePage {
   using iterator = IteratorImpl<HeapObjectHeader>;
   using const_iterator = IteratorImpl<const HeapObjectHeader>;
 
-  // Allocates a new page.
-  static NormalPage* Create(NormalPageSpace*);
+  // Allocates a new page in the detached state.
+  static NormalPage* Create(PageBackend*, NormalPageSpace*);
 
   // Destroys and frees the page. The page must be detached from the
   // corresponding space (i.e. be swept when called).
   static void Destroy(NormalPage*);
@@ -161,8 +161,8 @@ class V8_EXPORT_PRIVATE NormalPage final : public BasePage {
 class V8_EXPORT_PRIVATE LargePage final : public BasePage {
  public:
-  // Allocates a new page.
-  static LargePage* Create(LargePageSpace*, size_t);
+  // Allocates a new page in the detached state.
+  static LargePage* Create(PageBackend*, LargePageSpace*, size_t);
 
   // Destroys and frees the page. The page must be detached from the
   // corresponding space (i.e. be swept when called).
   static void Destroy(LargePage*);
@@ -39,21 +39,6 @@ BaseSpace::Pages BaseSpace::RemoveAllPages() {
 NormalPageSpace::NormalPageSpace(RawHeap* heap, size_t index)
     : BaseSpace(heap, index, PageType::kNormal) {}
 
-void NormalPageSpace::AddToFreeList(void* address, size_t size) {
-  free_list_.Add({address, size});
-  NormalPage::From(BasePage::FromPayload(address))
-      ->object_start_bitmap()
-      .SetBit(static_cast<Address>(address));
-}
-
-void NormalPageSpace::ResetLinearAllocationBuffer() {
-  if (current_lab_.size()) {
-    DCHECK_NOT_NULL(current_lab_.start());
-    AddToFreeList(current_lab_.start(), current_lab_.size());
-    current_lab_.Set(nullptr, 0);
-  }
-}
-
 LargePageSpace::LargePageSpace(RawHeap* heap, size_t index)
     : BaseSpace(heap, index, PageType::kLarge) {}
@@ -94,9 +94,6 @@ class V8_EXPORT_PRIVATE NormalPageSpace final : public BaseSpace {
   NormalPageSpace(RawHeap* heap, size_t index);
 
-  void AddToFreeList(void*, size_t);
-  void ResetLinearAllocationBuffer();
-
   LinearAllocationBuffer& linear_allocation_buffer() { return current_lab_; }
   const LinearAllocationBuffer& linear_allocation_buffer() const {
     return current_lab_;
@@ -136,7 +136,8 @@ Heap::Heap(std::shared_ptr<cppgc::Platform> platform,
           std::make_unique<PageBackend>(platform_->GetPageAllocator())),
 #endif
       stats_collector_(std::make_unique<StatsCollector>()),
-      object_allocator_(&raw_heap_, stats_collector_.get()),
+      object_allocator_(&raw_heap_, page_backend_.get(),
+                        stats_collector_.get()),
       sweeper_(&raw_heap_, platform_.get(), stats_collector_.get()),
      gc_invoker_(this, platform_.get(), options.stack_support),
      growing_(&gc_invoker_, stats_collector_.get(),
@@ -27,7 +27,11 @@
 #include "src/heap/cppgc/virtual-memory.h"
 
 #if defined(CPPGC_CAGED_HEAP)
-#include "src/base/bounded-page-allocator.h"
+namespace v8 {
+namespace base {
+class BoundedPageAllocator;
+}
+}  // namespace v8
 #endif
 
 namespace cppgc {
@@ -125,6 +129,7 @@ class V8_EXPORT_PRIVATE Heap final : public cppgc::Heap,
   cppgc::Platform* platform() { return platform_.get(); }
   const cppgc::Platform* platform() const { return platform_.get(); }
 
+  ObjectAllocator& object_allocator() { return object_allocator_; }
   Sweeper& sweeper() { return sweeper_; }
 
   size_t epoch() const final { return epoch_; }
@@ -15,20 +15,6 @@ namespace cppgc {
 namespace internal {
 
 namespace {
 
-class ResetLocalAllocationBufferVisitor final
-    : public HeapVisitor<ResetLocalAllocationBufferVisitor> {
- public:
-  bool VisitLargePageSpace(LargePageSpace*) { return true; }
-  bool VisitNormalPageSpace(NormalPageSpace* space) {
-    space->ResetLinearAllocationBuffer();
-    return true;
-  }
-};
-
-void ResetLocalAllocationBuffers(Heap* heap) {
-  ResetLocalAllocationBufferVisitor visitor;
-  visitor.Traverse(&heap->raw_heap());
-}
-
 void EnterIncrementalMarkingIfNeeded(Marker::MarkingConfig config) {
   if (config.marking_type == Marker::MarkingConfig::MarkingType::kIncremental ||
@@ -111,7 +97,7 @@ void Marker::FinishMarking(MarkingConfig config) {
   // Reset LABs before trying to conservatively mark in-construction objects.
   // This is also needed in preparation for sweeping.
-  ResetLocalAllocationBuffers(heap_);
+  heap_->object_allocator().ResetLinearAllocationBuffers();
   if (config_.stack_state == MarkingConfig::StackState::kNoHeapPointers) {
     FlushNotFullyConstructedObjects();
   } else {
@@ -140,7 +126,7 @@ void Marker::ProcessWeakness() {
 void Marker::VisitRoots() {
   // Reset LABs before scanning roots. LABs are cleared to allow
   // ObjectStartBitmap handling without considering LABs.
-  ResetLocalAllocationBuffers(heap_);
+  heap_->object_allocator().ResetLinearAllocationBuffers();
   heap_->GetStrongPersistentRegion().Trace(marking_visitor_.get());
   if (config_.stack_state != MarkingConfig::StackState::kNoHeapPointers)
@@ -4,11 +4,15 @@
 #include "src/heap/cppgc/object-allocator.h"
 
+#include "src/base/logging.h"
+#include "src/base/macros.h"
+#include "src/heap/cppgc/free-list.h"
 #include "src/heap/cppgc/globals.h"
 #include "src/heap/cppgc/heap-object-header-inl.h"
 #include "src/heap/cppgc/heap-object-header.h"
 #include "src/heap/cppgc/heap-page.h"
 #include "src/heap/cppgc/heap-space.h"
+#include "src/heap/cppgc/heap-visitor.h"
 #include "src/heap/cppgc/heap.h"
 #include "src/heap/cppgc/object-allocator-inl.h"
 #include "src/heap/cppgc/object-start-bitmap.h"
@@ -20,19 +24,56 @@ namespace cppgc {
 namespace internal {
 
 namespace {
 
-void* AllocateLargeObject(RawHeap* raw_heap, LargePageSpace* space, size_t size,
+void AddToFreeList(NormalPageSpace* space, Address start, size_t size) {
+  auto& free_list = space->free_list();
+  free_list.Add({start, size});
+  NormalPage::From(BasePage::FromPayload(start))
+      ->object_start_bitmap()
+      .SetBit(start);
+}
+
+void ReplaceLinearAllocationBuffer(NormalPageSpace* space,
+                                   StatsCollector* stats_collector,
+                                   Address new_buffer, size_t new_size) {
+  DCHECK_NOT_NULL(space);
+  DCHECK_NOT_NULL(stats_collector);
+
+  auto& lab = space->linear_allocation_buffer();
+  if (lab.size()) {
+    AddToFreeList(space, lab.start(), lab.size());
+    stats_collector->NotifyExplicitFree(lab.size());
+  }
+
+  lab.Set(new_buffer, new_size);
+  if (new_size) {
+    DCHECK_NOT_NULL(new_buffer);
+    stats_collector->NotifyAllocation(new_size);
+    NormalPage::From(BasePage::FromPayload(new_buffer))
+        ->object_start_bitmap()
+        .ClearBit(new_buffer);
+  }
+}
+
+void* AllocateLargeObject(PageBackend* page_backend, LargePageSpace* space,
+                          StatsCollector* stats_collector, size_t size,
                           GCInfoIndex gcinfo) {
-  LargePage* page = LargePage::Create(space, size);
+  LargePage* page = LargePage::Create(page_backend, space, size);
+  space->AddPage(page);
   auto* header = new (page->ObjectHeader())
       HeapObjectHeader(HeapObjectHeader::kLargeObjectSizeInHeader, gcinfo);
 
+  stats_collector->NotifyAllocation(size);
+
   return header->Payload();
 }
 
 }  // namespace
 
-ObjectAllocator::ObjectAllocator(RawHeap* heap, StatsCollector* stats_collector)
-    : raw_heap_(heap), stats_collector_(stats_collector) {}
+ObjectAllocator::ObjectAllocator(RawHeap* heap, PageBackend* page_backend,
+                                 StatsCollector* stats_collector)
+    : raw_heap_(heap),
+      page_backend_(page_backend),
+      stats_collector_(stats_collector) {}
 
 void* ObjectAllocator::OutOfLineAllocate(NormalPageSpace* space, size_t size,
                                          GCInfoIndex gcinfo) {
@@ -50,7 +91,8 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace* space,
   if (size >= kLargeObjectSizeThreshold) {
     auto* large_space = LargePageSpace::From(
         raw_heap_->Space(RawHeap::RegularSpaceType::kLarge));
-    return AllocateLargeObject(raw_heap_, large_space, size, gcinfo);
+    return AllocateLargeObject(page_backend_, large_space, stats_collector_,
+                               size, gcinfo);
   }
 
   // 2. Try to allocate from the freelist.
@@ -66,7 +108,9 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace* space,
   raw_heap_->heap()->sweeper().Finish();
 
   // 5. Add a new page to this heap.
-  NormalPage::Create(space);
+  auto* new_page = NormalPage::Create(page_backend_, space);
+  space->AddPage(new_page);
+  AddToFreeList(space, new_page->PayloadStart(), new_page->PayloadSize());
 
   // 6. Try to allocate from the freelist. This allocation must succeed.
   void* result = AllocateFromFreeList(space, size, gcinfo);
@@ -80,19 +124,30 @@ void* ObjectAllocator::AllocateFromFreeList(NormalPageSpace* space, size_t size,
   const FreeList::Block entry = space->free_list().Allocate(size);
   if (!entry.address) return nullptr;
 
-  auto& current_lab = space->linear_allocation_buffer();
-  if (current_lab.size()) {
-    space->AddToFreeList(current_lab.start(), current_lab.size());
-    stats_collector_->NotifyExplicitFree(current_lab.size());
-  }
-
-  current_lab.Set(static_cast<Address>(entry.address), entry.size);
-  stats_collector_->NotifyAllocation(current_lab.size());
-  NormalPage::From(BasePage::FromPayload(current_lab.start()))
-      ->object_start_bitmap()
-      .ClearBit(current_lab.start());
+  ReplaceLinearAllocationBuffer(
+      space, stats_collector_, static_cast<Address>(entry.address), entry.size);
 
   return AllocateObjectOnSpace(space, size, gcinfo);
 }
 
+void ObjectAllocator::ResetLinearAllocationBuffers() {
+  class Resetter : public HeapVisitor<Resetter> {
+   public:
+    explicit Resetter(StatsCollector* stats) : stats_collector_(stats) {}
+
+    bool VisitLargePageSpace(LargePageSpace*) { return true; }
+
+    bool VisitNormalPageSpace(NormalPageSpace* space) {
+      ReplaceLinearAllocationBuffer(space, stats_collector_, nullptr, 0);
+      return true;
+    }
+
+   private:
+    StatsCollector* stats_collector_;
+  } visitor(stats_collector_);
+
+  visitor.Traverse(raw_heap_);
+}
+
 }  // namespace internal
 }  // namespace cppgc
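ResetLinearAllocationBuffers() above walks the heap with a local HeapVisitor and routes every normal space through the same ReplaceLinearAllocationBuffer() bottleneck, so resetting a LAB is accounted exactly like an explicit free. A stripped-down sketch of that CRTP visitor shape, with stand-in space types rather than cppgc's real HeapVisitor and spaces:

#include <cstdio>
#include <vector>

// Stand-in space types; the real classes live in src/heap/cppgc.
struct NormalPageSpace { int id; };
struct LargePageSpace { int id; };

// Minimal CRTP visitor in the spirit of cppgc's HeapVisitor: Traverse()
// dispatches each space to the derived class's Visit* overloads.
template <typename Derived>
struct HeapVisitorSketch {
  void Traverse(const std::vector<NormalPageSpace*>& normal,
                const std::vector<LargePageSpace*>& large) {
    for (auto* s : normal) static_cast<Derived*>(this)->VisitNormalPageSpace(s);
    for (auto* s : large) static_cast<Derived*>(this)->VisitLargePageSpace(s);
  }
};

struct Resetter : HeapVisitorSketch<Resetter> {
  bool VisitLargePageSpace(LargePageSpace*) { return true; }  // nothing to reset
  bool VisitNormalPageSpace(NormalPageSpace* space) {
    // Stand-in for ReplaceLinearAllocationBuffer(space, stats, nullptr, 0).
    std::printf("resetting LAB of normal space %d\n", space->id);
    return true;
  }
};

int main() {
  NormalPageSpace n0{0}, n1{1};
  LargePageSpace l0{0};
  Resetter resetter;
  resetter.Traverse({&n0, &n1}, {&l0});
}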
@@ -13,15 +13,19 @@ namespace cppgc {
 namespace internal {
 
 class StatsCollector;
+class PageBackend;
 
 class V8_EXPORT_PRIVATE ObjectAllocator final {
  public:
-  ObjectAllocator(RawHeap* heap, StatsCollector* stats_collector);
+  ObjectAllocator(RawHeap* heap, PageBackend* page_backend,
+                  StatsCollector* stats_collector);
 
   inline void* AllocateObject(size_t size, GCInfoIndex gcinfo);
   inline void* AllocateObject(size_t size, GCInfoIndex gcinfo,
                               CustomSpaceIndex space_index);
 
+  void ResetLinearAllocationBuffers();
+
  private:
   // Returns the initially tried SpaceType to allocate an object of |size| bytes
   // on. Returns the largest regular object size bucket for large objects.
@@ -35,6 +39,7 @@ class V8_EXPORT_PRIVATE ObjectAllocator final {
   void* AllocateFromFreeList(NormalPageSpace*, size_t, GCInfoIndex);
 
   RawHeap* raw_heap_;
+  PageBackend* page_backend_;
   StatsCollector* stats_collector_;
 };
@@ -27,21 +27,6 @@ namespace internal {
 namespace {
 
-class ResetLocalAllocationBufferVisitor final
-    : public HeapVisitor<ResetLocalAllocationBufferVisitor> {
- public:
-  bool VisitLargePageSpace(LargePageSpace*) { return true; }
-  bool VisitNormalPageSpace(NormalPageSpace* space) {
-    space->ResetLinearAllocationBuffer();
-    return true;
-  }
-};
-
-void ResetLocalAllocationBuffers(Heap* heap) {
-  ResetLocalAllocationBufferVisitor visitor;
-  visitor.Traverse(&heap->raw_heap());
-}
-
 size_t g_destructor_callcount;
 
 template <size_t Size>
@@ -82,7 +67,7 @@ class ConcurrentSweeperTest : public testing::TestWithHeap {
   void StartSweeping() {
     Heap* heap = Heap::From(GetHeap());
-    ResetLocalAllocationBuffers(heap);
+    ResetLinearAllocationBuffers();
     // Pretend do finish marking as StatsCollector verifies that Notify*
     // methods are called in the right order.
     heap->stats_collector()->NotifyMarkingStarted();
@@ -14,6 +14,7 @@
 #include "src/heap/cppgc/heap-object-header-inl.h"
 #include "src/heap/cppgc/heap-object-header.h"
 #include "src/heap/cppgc/page-memory-inl.h"
+#include "src/heap/cppgc/page-memory.h"
 #include "src/heap/cppgc/raw-heap.h"
 #include "test/unittests/heap/cppgc/tests.h"
 #include "testing/gtest/include/gtest/gtest.h"
@@ -26,6 +27,9 @@ namespace {
 class PageTest : public testing::TestWithHeap {
  public:
   RawHeap& GetRawHeap() { return Heap::From(GetHeap())->raw_heap(); }
+  PageBackend* GetPageBackend() {
+    return Heap::From(GetHeap())->page_backend();
+  }
 };
 
 template <size_t Size>
@@ -192,11 +196,15 @@ TEST_F(PageTest, NormalPageCreationDestruction) {
   const PageBackend* backend = Heap::From(GetHeap())->page_backend();
   auto* space = static_cast<NormalPageSpace*>(
       heap.Space(RawHeap::RegularSpaceType::kNormal1));
-  auto* page = NormalPage::Create(space);
+  auto* page = NormalPage::Create(GetPageBackend(), space);
+  EXPECT_NE(nullptr, backend->Lookup(page->PayloadStart()));
+
+  space->AddPage(page);
   EXPECT_NE(space->end(), std::find(space->begin(), space->end(), page));
+
+  space->free_list().Add({page->PayloadStart(), page->PayloadSize()});
   EXPECT_TRUE(
       space->free_list().Contains({page->PayloadStart(), page->PayloadSize()}));
-  EXPECT_NE(nullptr, backend->Lookup(page->PayloadStart()));
 
   space->free_list().Clear();
   EXPECT_FALSE(
@@ -213,10 +221,12 @@ TEST_F(PageTest, LargePageCreationDestruction) {
   const PageBackend* backend = Heap::From(GetHeap())->page_backend();
   auto* space = static_cast<LargePageSpace*>(
       heap.Space(RawHeap::RegularSpaceType::kLarge));
-  auto* page = LargePage::Create(space, kObjectSize);
-  EXPECT_NE(space->end(), std::find(space->begin(), space->end(), page));
+  auto* page = LargePage::Create(GetPageBackend(), space, kObjectSize);
   EXPECT_NE(nullptr, backend->Lookup(page->PayloadStart()));
 
+  space->AddPage(page);
+  EXPECT_NE(space->end(), std::find(space->begin(), space->end(), page));
+
   space->RemovePage(page);
   EXPECT_EQ(space->end(), std::find(space->begin(), space->end(), page));
   LargePage::Destroy(page);
@@ -229,13 +239,16 @@ TEST_F(PageTest, UnsweptPageDestruction) {
   {
     auto* space = static_cast<NormalPageSpace*>(
        heap.Space(RawHeap::RegularSpaceType::kNormal1));
-    auto* page = NormalPage::Create(space);
+    auto* page = NormalPage::Create(GetPageBackend(), space);
+    space->AddPage(page);
     EXPECT_DEATH_IF_SUPPORTED(NormalPage::Destroy(page), "");
   }
   {
     auto* space = static_cast<LargePageSpace*>(
        heap.Space(RawHeap::RegularSpaceType::kLarge));
-    auto* page = LargePage::Create(space, 2 * kLargeObjectSizeThreshold);
+    auto* page = LargePage::Create(GetPageBackend(), space,
+                                   2 * kLargeObjectSizeThreshold);
+    space->AddPage(page);
     EXPECT_DEATH_IF_SUPPORTED(LargePage::Destroy(page), "");
     // Detach page and really destroy page in the parent process so that sweeper
     // doesn't consider it.
@@ -91,6 +91,8 @@ TEST_F(GCHeapTest, ObjectPayloadSize) {
   Heap::From(GetHeap())->CollectGarbage(
       GarbageCollector::Config::ConservativeAtomicConfig());
 
+  Heap::NoGCScope no_gc_scope(Heap::From(GetHeap()));
+
   for (size_t k = 0; k < kNumberOfObjectsPerArena; ++k) {
     MakeGarbageCollected<GCed<kObjectSizes[0]>>(GetHeap());
     MakeGarbageCollected<GCed<kObjectSizes[1]>>(GetHeap());
@@ -25,21 +25,6 @@ namespace internal {
 namespace {
 
-class ResetLocalAllocationBufferVisitor final
-    : public HeapVisitor<ResetLocalAllocationBufferVisitor> {
- public:
-  bool VisitLargePageSpace(LargePageSpace*) { return true; }
-  bool VisitNormalPageSpace(NormalPageSpace* space) {
-    space->ResetLinearAllocationBuffer();
-    return true;
-  }
-};
-
-void ResetLocalAllocationBuffers(Heap* heap) {
-  ResetLocalAllocationBufferVisitor visitor;
-  visitor.Traverse(&heap->raw_heap());
-}
-
 size_t g_destructor_callcount;
 
 template <size_t Size>
@@ -59,7 +44,7 @@ class SweeperTest : public testing::TestWithHeap {
   void Sweep() {
     Heap* heap = Heap::From(GetHeap());
-    ResetLocalAllocationBuffers(heap);
+    ResetLinearAllocationBuffers();
     Sweeper& sweeper = heap->sweeper();
     // Pretend do finish marking as StatsCollector verifies that Notify*
     // methods are called in the right order.
@@ -6,6 +6,7 @@
 #include <memory>
 
+#include "src/heap/cppgc/object-allocator.h"
 #include "test/unittests/heap/cppgc/test-platform.h"
 
 namespace cppgc {
@@ -29,6 +30,10 @@ void TestWithPlatform::TearDownTestSuite() {
 TestWithHeap::TestWithHeap() : heap_(Heap::Create(platform_)) {}
 
+void TestWithHeap::ResetLinearAllocationBuffers() {
+  Heap::From(GetHeap())->object_allocator().ResetLinearAllocationBuffers();
+}
+
 TestSupportingAllocationOnly::TestSupportingAllocationOnly()
     : no_gc_scope_(internal::Heap::From(GetHeap())) {}
@@ -41,6 +41,8 @@ class TestWithHeap : public TestWithPlatform {
     return Heap::From(GetHeap())->marker_;
   }
 
+  void ResetLinearAllocationBuffers();
+
  private:
   std::unique_ptr<cppgc::Heap> heap_;
 };