Commit 69dd0c14 authored by Michael Lippautz, committed by V8 LUCI CQ

cppgc: Use reference instead of pointers when guaranteed not-null

Switches internals of BasePage and some getters to references that are
guaranteed non-null.

Bug: v8:11822
Change-Id: I484c4451720dc7e04f8b89dbe4fef03a3eaf817e
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2917038
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74756}
parent 1458040f
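Context for the diff below: `BasePage` now stores `HeapBase&`/`BaseSpace&` members, `heap()` returns a reference, and the page factories take references, so call sites switch from `->` to `.` and the `DCHECK_NOT_NULL` checks on the factory arguments become unnecessary. The following is a minimal, self-contained sketch of that pointer-versus-reference pattern; `HeapStandIn`, `PagePointerStyle`, and `PageReferenceStyle` are simplified stand-ins for illustration, not the actual cppgc classes.

```cpp
// Sketch only: stand-in types, not the real cppgc HeapBase/BasePage.
#include <cassert>
#include <cstddef>

class HeapStandIn {
 public:
  void NotifyExplicitFree(size_t bytes) { freed_ += bytes; }
  size_t freed() const { return freed_; }

 private:
  size_t freed_ = 0;
};

// Before: the page stores a pointer and exposes a pointer getter, so the
// non-null guarantee lives only in a runtime check.
class PagePointerStyle {
 public:
  explicit PagePointerStyle(HeapStandIn* heap) : heap_(heap) {
    assert(heap);  // Null check needed even though the value is never null.
  }
  HeapStandIn* heap() const { return heap_; }

 private:
  HeapStandIn* heap_;
};

// After: the page stores a reference, encoding the non-null guarantee in the
// type itself; callers use `.` and no null check is required.
class PageReferenceStyle {
 public:
  explicit PageReferenceStyle(HeapStandIn& heap) : heap_(heap) {}
  HeapStandIn& heap() const { return heap_; }

 private:
  HeapStandIn& heap_;
};

int main() {
  HeapStandIn heap;

  PagePointerStyle p1(&heap);
  p1.heap()->NotifyExplicitFree(64);  // Pointer-style call site: `->`.

  PageReferenceStyle p2(heap);
  p2.heap().NotifyExplicitFree(64);   // Reference-style call site: `.`.

  return heap.freed() == 128 ? 0 : 1;
}
```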
......@@ -39,7 +39,7 @@ void FreeUnreferencedObject(HeapHandle& heap_handle, void* object) {
BasePage* base_page = BasePage::FromPayload(object);
if (base_page->is_large()) { // Large object.
base_page->space()->RemovePage(base_page);
base_page->heap()->stats_collector()->NotifyExplicitFree(
base_page->heap().stats_collector()->NotifyExplicitFree(
LargePage::From(base_page)->PayloadSize());
LargePage::Destroy(LargePage::From(base_page));
} else { // Regular object.
......@@ -53,7 +53,7 @@ void FreeUnreferencedObject(HeapHandle& heap_handle, void* object) {
lab.Set(reinterpret_cast<Address>(&header), lab.size() + header_size);
normal_page->object_start_bitmap().ClearBit(lab.start());
} else { // Returning to free list.
base_page->heap()->stats_collector()->NotifyExplicitFree(header_size);
base_page->heap().stats_collector()->NotifyExplicitFree(header_size);
normal_space.free_list().Add({&header, header_size});
// No need to update the bitmap as the same bit is reused for the free
// list entry.
......@@ -104,7 +104,7 @@ bool Shrink(HeapObjectHeader& header, BasePage& base_page, size_t new_size,
// the smallest size class.
if (size_delta >= ObjectAllocator::kSmallestSpaceSize) {
SetMemoryInaccessible(free_start, size_delta);
base_page.heap()->stats_collector()->NotifyExplicitFree(size_delta);
base_page.heap().stats_collector()->NotifyExplicitFree(size_delta);
normal_space.free_list().Add({free_start, size_delta});
NormalPage::From(&base_page)->object_start_bitmap().SetBit(free_start);
header.SetAllocatedSize(new_size);
......@@ -121,7 +121,7 @@ bool Resize(void* object, size_t new_object_size) {
// BasePage is okay for regular and large objects.
BasePage* base_page = BasePage::FromPayload(object);
if (InGC(*base_page->heap())) {
if (InGC(base_page->heap())) {
return false;
}
......
......@@ -104,22 +104,20 @@ const HeapObjectHeader* BasePage::TryObjectHeaderFromInnerAddress(
return header;
}
BasePage::BasePage(HeapBase* heap, BaseSpace* space, PageType type)
BasePage::BasePage(HeapBase& heap, BaseSpace& space, PageType type)
: heap_(heap), space_(space), type_(type) {
DCHECK_EQ(0u, (reinterpret_cast<uintptr_t>(this) - kGuardPageSize) &
kPageOffsetMask);
DCHECK_EQ(&heap_->raw_heap(), space_->raw_heap());
DCHECK_EQ(&heap_.raw_heap(), space_.raw_heap());
}
// static
NormalPage* NormalPage::Create(PageBackend* page_backend,
NormalPageSpace* space) {
DCHECK_NOT_NULL(page_backend);
DCHECK_NOT_NULL(space);
void* memory = page_backend->AllocateNormalPageMemory(space->index());
auto* normal_page = new (memory) NormalPage(space->raw_heap()->heap(), space);
NormalPage* NormalPage::Create(PageBackend& page_backend,
NormalPageSpace& space) {
void* memory = page_backend.AllocateNormalPageMemory(space.index());
auto* normal_page = new (memory) NormalPage(*space.raw_heap()->heap(), space);
normal_page->SynchronizedStore();
normal_page->heap()->stats_collector()->NotifyAllocatedMemory(kPageSize);
normal_page->heap().stats_collector()->NotifyAllocatedMemory(kPageSize);
return normal_page;
}
......@@ -129,13 +127,13 @@ void NormalPage::Destroy(NormalPage* page) {
BaseSpace* space = page->space();
DCHECK_EQ(space->end(), std::find(space->begin(), space->end(), page));
page->~NormalPage();
PageBackend* backend = page->heap()->page_backend();
page->heap()->stats_collector()->NotifyFreedMemory(kPageSize);
PageBackend* backend = page->heap().page_backend();
page->heap().stats_collector()->NotifyFreedMemory(kPageSize);
backend->FreeNormalPageMemory(space->index(),
reinterpret_cast<Address>(page));
}
NormalPage::NormalPage(HeapBase* heap, BaseSpace* space)
NormalPage::NormalPage(HeapBase& heap, BaseSpace& space)
: BasePage(heap, space, PageType::kNormal),
object_start_bitmap_(PayloadStart()) {
DCHECK_LT(kLargeObjectSizeThreshold,
......@@ -179,7 +177,7 @@ size_t NormalPage::PayloadSize() {
return kPageSize - 2 * kGuardPageSize - header_size;
}
LargePage::LargePage(HeapBase* heap, BaseSpace* space, size_t size)
LargePage::LargePage(HeapBase& heap, BaseSpace& space, size_t size)
: BasePage(heap, space, PageType::kLarge), payload_size_(size) {}
LargePage::~LargePage() = default;
......@@ -192,19 +190,17 @@ size_t LargePage::AllocationSize(size_t payload_size) {
}
// static
LargePage* LargePage::Create(PageBackend* page_backend, LargePageSpace* space,
LargePage* LargePage::Create(PageBackend& page_backend, LargePageSpace& space,
size_t size) {
DCHECK_NOT_NULL(page_backend);
DCHECK_NOT_NULL(space);
DCHECK_LE(kLargeObjectSizeThreshold, size);
const size_t allocation_size = AllocationSize(size);
auto* heap = space->raw_heap()->heap();
void* memory = page_backend->AllocateLargePageMemory(allocation_size);
LargePage* page = new (memory) LargePage(heap, space, size);
auto* heap = space.raw_heap()->heap();
void* memory = page_backend.AllocateLargePageMemory(allocation_size);
LargePage* page = new (memory) LargePage(*heap, space, size);
page->SynchronizedStore();
page->heap()->stats_collector()->NotifyAllocatedMemory(allocation_size);
page->heap().stats_collector()->NotifyAllocatedMemory(allocation_size);
return page;
}
......@@ -216,8 +212,8 @@ void LargePage::Destroy(LargePage* page) {
DCHECK_EQ(space->end(), std::find(space->begin(), space->end(), page));
#endif
page->~LargePage();
PageBackend* backend = page->heap()->page_backend();
page->heap()->stats_collector()->NotifyFreedMemory(
PageBackend* backend = page->heap().page_backend();
page->heap().stats_collector()->NotifyFreedMemory(
AllocationSize(page->PayloadSize()));
backend->FreeLargePageMemory(reinterpret_cast<Address>(page));
}
......
......@@ -33,11 +33,10 @@ class V8_EXPORT_PRIVATE BasePage {
BasePage(const BasePage&) = delete;
BasePage& operator=(const BasePage&) = delete;
HeapBase* heap() const { return heap_; }
HeapBase& heap() const { return heap_; }
BaseSpace* space() { return space_; }
const BaseSpace* space() const { return space_; }
void set_space(BaseSpace* space) { space_ = space; }
BaseSpace* space() { return &space_; }
const BaseSpace* space() const { return &space_; }
bool is_large() const { return type_ == PageType::kLarge; }
......@@ -82,11 +81,11 @@ class V8_EXPORT_PRIVATE BasePage {
protected:
enum class PageType : uint8_t { kNormal, kLarge };
BasePage(HeapBase*, BaseSpace*, PageType);
BasePage(HeapBase&, BaseSpace&, PageType);
private:
HeapBase* heap_;
BaseSpace* space_;
HeapBase& heap_;
BaseSpace& space_;
PageType type_;
};
......@@ -138,7 +137,7 @@ class V8_EXPORT_PRIVATE NormalPage final : public BasePage {
using const_iterator = IteratorImpl<const HeapObjectHeader>;
// Allocates a new page in the detached state.
static NormalPage* Create(PageBackend*, NormalPageSpace*);
static NormalPage* Create(PageBackend&, NormalPageSpace&);
// Destroys and frees the page. The page must be detached from the
// corresponding space (i.e. be swept when called).
static void Destroy(NormalPage*);
......@@ -187,7 +186,7 @@ class V8_EXPORT_PRIVATE NormalPage final : public BasePage {
}
private:
NormalPage(HeapBase* heap, BaseSpace* space);
NormalPage(HeapBase& heap, BaseSpace& space);
~NormalPage();
size_t allocated_bytes_at_last_gc_ = 0;
......@@ -199,7 +198,7 @@ class V8_EXPORT_PRIVATE LargePage final : public BasePage {
// Returns the allocation size required for a payload of size |size|.
static size_t AllocationSize(size_t size);
// Allocates a new page in the detached state.
static LargePage* Create(PageBackend*, LargePageSpace*, size_t);
static LargePage* Create(PageBackend&, LargePageSpace&, size_t);
// Destroys and frees the page. The page must be detached from the
// corresponding space (i.e. be swept when called).
static void Destroy(LargePage*);
......@@ -233,7 +232,7 @@ class V8_EXPORT_PRIVATE LargePage final : public BasePage {
}
private:
LargePage(HeapBase* heap, BaseSpace* space, size_t);
LargePage(HeapBase& heap, BaseSpace& space, size_t);
~LargePage();
size_t payload_size_;
......
......@@ -199,7 +199,7 @@ void MarkingStateBase::MarkAndPush(HeapObjectHeader& header,
bool MarkingStateBase::MarkNoPush(HeapObjectHeader& header) {
// A GC should only mark the objects that belong in its heap.
DCHECK_EQ(&heap_, BasePage::FromPayload(&header)->heap());
DCHECK_EQ(&heap_, &BasePage::FromPayload(&header)->heap());
// Never mark free space objects. This would e.g. hint to marking a promptly
// freed backing store.
DCHECK(!header.IsFree<AccessMode::kAtomic>());
......
......@@ -86,7 +86,7 @@ void ReplaceLinearAllocationBuffer(NormalPageSpace* space,
void* AllocateLargeObject(PageBackend* page_backend, LargePageSpace* space,
StatsCollector* stats_collector, size_t size,
GCInfoIndex gcinfo) {
LargePage* page = LargePage::Create(page_backend, space, size);
LargePage* page = LargePage::Create(*page_backend, *space, size);
space->AddPage(page);
auto* header = new (page->ObjectHeader())
......@@ -160,7 +160,7 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace* space,
// TODO(chromium:1056170): Make use of the synchronously freed memory.
// 5. Add a new page to this heap.
auto* new_page = NormalPage::Create(page_backend_, space);
auto* new_page = NormalPage::Create(*page_backend_, *space);
space->AddPage(new_page);
// 6. Set linear allocation buffer to new page.
......
......@@ -42,7 +42,7 @@ void EnabledCheckingPolicy::CheckPointerImpl(const void* ptr,
// References cannot change their heap association which means that state is
// immutable once it is set.
if (!heap_) {
heap_ = base_page->heap();
heap_ = &base_page->heap();
if (!heap_->page_backend()->Lookup(reinterpret_cast<Address>(this))) {
// If `this` is not contained within the heap of `ptr`, we must deal with
// an on-stack or off-heap reference. For both cases there should be no
......@@ -52,7 +52,7 @@ void EnabledCheckingPolicy::CheckPointerImpl(const void* ptr,
}
// Member references should never mix heaps.
DCHECK_EQ(heap_, base_page->heap());
DCHECK_EQ(heap_, &base_page->heap());
// Header checks.
const HeapObjectHeader* header = nullptr;
......@@ -86,26 +86,26 @@ void EnabledCheckingPolicy::CheckPointerImpl(const void* ptr,
PersistentRegion& StrongPersistentPolicy::GetPersistentRegion(
const void* object) {
auto* heap = BasePage::FromPayload(object)->heap();
return heap->GetStrongPersistentRegion();
return BasePage::FromPayload(object)->heap().GetStrongPersistentRegion();
}
PersistentRegion& WeakPersistentPolicy::GetPersistentRegion(
const void* object) {
auto* heap = BasePage::FromPayload(object)->heap();
return heap->GetWeakPersistentRegion();
return BasePage::FromPayload(object)->heap().GetWeakPersistentRegion();
}
CrossThreadPersistentRegion&
StrongCrossThreadPersistentPolicy::GetPersistentRegion(const void* object) {
auto* heap = BasePage::FromPayload(object)->heap();
return heap->GetStrongCrossThreadPersistentRegion();
return BasePage::FromPayload(object)
->heap()
.GetStrongCrossThreadPersistentRegion();
}
CrossThreadPersistentRegion&
WeakCrossThreadPersistentPolicy::GetPersistentRegion(const void* object) {
auto* heap = BasePage::FromPayload(object)->heap();
return heap->GetWeakCrossThreadPersistentRegion();
return BasePage::FromPayload(object)
->heap()
.GetWeakCrossThreadPersistentRegion();
}
} // namespace internal
......
......@@ -21,7 +21,7 @@ void PreFinalizerRegistrationDispatcher::RegisterPrefinalizer(
PreFinalizer pre_finalizer) {
BasePage::FromPayload(pre_finalizer.object)
->heap()
->prefinalizer_handler()
.prefinalizer_handler()
->RegisterPrefinalizer(pre_finalizer);
}
......
......@@ -55,7 +55,7 @@ void ConservativeTracingVisitor::TraceConservativelyIfNeeded(
if (!page) return;
DCHECK_EQ(&heap_, page->heap());
DCHECK_EQ(&heap_, &page->heap());
auto* header = page->TryObjectHeaderFromInnerAddress(
const_cast<Address>(reinterpret_cast<ConstAddress>(address)));
......
......@@ -63,19 +63,19 @@ void WriteBarrier::DijkstraMarkingBarrierSlowWithSentinelCheck(
// static
void WriteBarrier::DijkstraMarkingBarrierSlow(const void* value) {
const BasePage* page = BasePage::FromPayload(value);
const auto* heap = page->heap();
const auto& heap = page->heap();
// GetWriteBarrierType() checks marking state.
DCHECK(heap->marker());
DCHECK(heap.marker());
// No write barriers should be executed from atomic pause marking.
DCHECK(!heap->in_atomic_pause());
DCHECK(!heap.in_atomic_pause());
auto& header =
const_cast<HeapObjectHeader&>(page->ObjectHeaderFromInnerAddress(value));
if (!header.TryMarkAtomic()) return;
ProcessMarkValue<MarkerBase::WriteBarrierType::kDijkstra>(
header, heap->marker(), value);
header, heap.marker(), value);
}
// static
......@@ -108,19 +108,19 @@ void WriteBarrier::SteeleMarkingBarrierSlowWithSentinelCheck(
// static
void WriteBarrier::SteeleMarkingBarrierSlow(const void* value) {
const BasePage* page = BasePage::FromPayload(value);
const auto* heap = page->heap();
const auto& heap = page->heap();
// GetWriteBarrierType() checks marking state.
DCHECK(heap->marker());
DCHECK(heap.marker());
// No write barriers should be executed from atomic pause marking.
DCHECK(!heap->in_atomic_pause());
DCHECK(!heap.in_atomic_pause());
auto& header =
const_cast<HeapObjectHeader&>(page->ObjectHeaderFromInnerAddress(value));
if (!header.IsMarked<AccessMode::kAtomic>()) return;
ProcessMarkValue<MarkerBase::WriteBarrierType::kSteele>(
header, heap->marker(), value);
ProcessMarkValue<MarkerBase::WriteBarrierType::kSteele>(header, heap.marker(),
value);
}
#if defined(CPPGC_YOUNG_GENERATION)
......@@ -154,8 +154,8 @@ bool WriteBarrierTypeForNonCagedHeapPolicy::IsMarking(const void* object,
// Large objects cannot have mixins, so we are guaranteed to always have
// a pointer on the same page.
const auto* page = BasePage::FromPayload(object);
*handle = page->heap();
const MarkerBase* marker = page->heap()->marker();
*handle = &page->heap();
const MarkerBase* marker = page->heap().marker();
return marker && marker->IsMarking();
}
......
......@@ -89,14 +89,14 @@ TEST_F(ExplicitManagementTest, FreeLargeObject) {
GetHeap()->GetAllocationHandle(),
AdditionalBytes(kLargeObjectSizeThreshold));
const auto* page = BasePage::FromPayload(o);
auto* heap = page->heap();
auto& heap = page->heap();
ASSERT_TRUE(page->is_large());
ConstAddress needle = reinterpret_cast<ConstAddress>(o);
const size_t size = LargePage::From(page)->PayloadSize();
EXPECT_TRUE(heap->page_backend()->Lookup(needle));
EXPECT_TRUE(heap.page_backend()->Lookup(needle));
const size_t allocated_size_before = AllocatedObjectSize();
subtle::FreeUnreferencedObject(GetHeapHandle(), *o);
EXPECT_FALSE(heap->page_backend()->Lookup(needle));
EXPECT_FALSE(heap.page_backend()->Lookup(needle));
EXPECT_EQ(allocated_size_before - size, AllocatedObjectSize());
}
......@@ -104,12 +104,12 @@ TEST_F(ExplicitManagementTest, FreeBailsOutDuringGC) {
const size_t snapshot_before = AllocatedObjectSize();
auto* o =
MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
auto* heap = BasePage::FromPayload(o)->heap();
heap->SetInAtomicPauseForTesting(true);
auto& heap = BasePage::FromPayload(o)->heap();
heap.SetInAtomicPauseForTesting(true);
const size_t allocated_size_before = AllocatedObjectSize();
subtle::FreeUnreferencedObject(GetHeapHandle(), *o);
EXPECT_EQ(allocated_size_before, AllocatedObjectSize());
heap->SetInAtomicPauseForTesting(false);
heap.SetInAtomicPauseForTesting(false);
ResetLinearAllocationBuffers();
subtle::FreeUnreferencedObject(GetHeapHandle(), *o);
EXPECT_EQ(snapshot_before, AllocatedObjectSize());
......@@ -192,8 +192,8 @@ TEST_F(ExplicitManagementTest, ResizeBailsOutDuringGC) {
auto* o = MakeGarbageCollected<DynamicallySized>(
GetHeap()->GetAllocationHandle(),
AdditionalBytes(ObjectAllocator::kSmallestSpaceSize - 1));
auto* heap = BasePage::FromPayload(o)->heap();
heap->SetInAtomicPauseForTesting(true);
auto& heap = BasePage::FromPayload(o)->heap();
heap.SetInAtomicPauseForTesting(true);
const size_t allocated_size_before = AllocatedObjectSize();
// Grow:
EXPECT_FALSE(
......@@ -201,7 +201,7 @@ TEST_F(ExplicitManagementTest, ResizeBailsOutDuringGC) {
// Shrink:
EXPECT_FALSE(subtle::Resize(*o, AdditionalBytes(0)));
EXPECT_EQ(allocated_size_before, AllocatedObjectSize());
heap->SetInAtomicPauseForTesting(false);
heap.SetInAtomicPauseForTesting(false);
}
} // namespace internal
......
......@@ -24,8 +24,8 @@ namespace {
class PageTest : public testing::TestWithHeap {
public:
RawHeap& GetRawHeap() { return Heap::From(GetHeap())->raw_heap(); }
PageBackend* GetPageBackend() {
return Heap::From(GetHeap())->page_backend();
PageBackend& GetPageBackend() {
return *Heap::From(GetHeap())->page_backend();
}
};
......@@ -188,7 +188,7 @@ TEST_F(PageTest, NormalPageCreationDestruction) {
const PageBackend* backend = Heap::From(GetHeap())->page_backend();
auto* space = static_cast<NormalPageSpace*>(
heap.Space(RawHeap::RegularSpaceType::kNormal1));
auto* page = NormalPage::Create(GetPageBackend(), space);
auto* page = NormalPage::Create(GetPageBackend(), *space);
EXPECT_NE(nullptr, backend->Lookup(page->PayloadStart()));
space->AddPage(page);
......@@ -213,7 +213,7 @@ TEST_F(PageTest, LargePageCreationDestruction) {
const PageBackend* backend = Heap::From(GetHeap())->page_backend();
auto* space = static_cast<LargePageSpace*>(
heap.Space(RawHeap::RegularSpaceType::kLarge));
auto* page = LargePage::Create(GetPageBackend(), space, kObjectSize);
auto* page = LargePage::Create(GetPageBackend(), *space, kObjectSize);
EXPECT_NE(nullptr, backend->Lookup(page->PayloadStart()));
space->AddPage(page);
......@@ -231,14 +231,14 @@ TEST_F(PageTest, UnsweptPageDestruction) {
{
auto* space = static_cast<NormalPageSpace*>(
heap.Space(RawHeap::RegularSpaceType::kNormal1));
auto* page = NormalPage::Create(GetPageBackend(), space);
auto* page = NormalPage::Create(GetPageBackend(), *space);
space->AddPage(page);
EXPECT_DEATH_IF_SUPPORTED(NormalPage::Destroy(page), "");
}
{
auto* space = static_cast<LargePageSpace*>(
heap.Space(RawHeap::RegularSpaceType::kLarge));
auto* page = LargePage::Create(GetPageBackend(), space,
auto* page = LargePage::Create(GetPageBackend(), *space,
2 * kLargeObjectSizeThreshold);
space->AddPage(page);
EXPECT_DEATH_IF_SUPPORTED(LargePage::Destroy(page), "");
......