Commit e691014a authored by Omer Katz, committed by V8 LUCI CQ

cppgc: Use references instead of pointers for non-null fields

Replace the non-null pointers in ObjectAllocator with references.
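
For context, a minimal sketch of the pattern this CL applies, using hypothetical StatsSink/Allocator names rather than the real cppgc types: a field that can never be null is bound as a reference in the constructor, so the non-null invariant is enforced by the type instead of by convention.

#include <iostream>

// Hypothetical collaborator standing in for RawHeap/PageBackend/StatsCollector.
struct StatsSink {
  void Note(const char* msg) { std::cout << msg << '\n'; }
};

// Before: non-null by convention only. Every use site carries an implicit
// "must not be null" contract that the compiler cannot check.
class AllocatorBefore {
 public:
  explicit AllocatorBefore(StatsSink* sink) : sink_(sink) {}
  void Allocate() { sink_->Note("allocated"); }  // crashes if sink_ is null
 private:
  StatsSink* sink_;
};

// After: a reference member makes the invariant part of the type. Callers
// must hand over a live object; no dereference or null check is needed.
class AllocatorAfter {
 public:
  explicit AllocatorAfter(StatsSink& sink) : sink_(sink) {}
  void Allocate() { sink_.Note("allocated"); }
 private:
  StatsSink& sink_;  // cannot be null; bound for the allocator's lifetime
};

int main() {
  StatsSink sink;
  AllocatorAfter allocator(sink);  // mirrors HeapBase passing *page_backend_
  allocator.Allocate();
}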

Bug: chromium:1056170
Change-Id: I55124610490bb903819f88a70b1f8e0fea4e430d
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3132969
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#76607}
parent 2a7bfabf
@@ -75,8 +75,8 @@ HeapBase::HeapBase(
           v8::base::Stack::GetStackStart())),
       prefinalizer_handler_(std::make_unique<PreFinalizerHandler>(*this)),
       compactor_(raw_heap_),
-      object_allocator_(&raw_heap_, page_backend_.get(), stats_collector_.get(),
-                        prefinalizer_handler_.get()),
+      object_allocator_(raw_heap_, *page_backend_, *stats_collector_,
+                        *prefinalizer_handler_),
       sweeper_(*this),
       stack_support_(stack_support) {
   stats_collector_->RegisterObserver(
...
@@ -83,16 +83,16 @@ void ReplaceLinearAllocationBuffer(NormalPageSpace& space,
   }
 }
 
-void* AllocateLargeObject(PageBackend* page_backend, LargePageSpace* space,
-                          StatsCollector* stats_collector, size_t size,
+void* AllocateLargeObject(PageBackend& page_backend, LargePageSpace& space,
+                          StatsCollector& stats_collector, size_t size,
                           GCInfoIndex gcinfo) {
-  LargePage* page = LargePage::Create(*page_backend, *space, size);
-  space->AddPage(page);
+  LargePage* page = LargePage::Create(page_backend, space, size);
+  space.AddPage(page);
   auto* header = new (page->ObjectHeader())
       HeapObjectHeader(HeapObjectHeader::kLargeObjectSizeInHeader, gcinfo);
-  stats_collector->NotifyAllocation(size);
+  stats_collector.NotifyAllocation(size);
   MarkRangeAsYoung(page, page->PayloadStart(), page->PayloadEnd());
   return header->ObjectStart();
@@ -102,9 +102,9 @@ void* AllocateLargeObject(PageBackend* page_backend, LargePageSpace* space,
 constexpr size_t ObjectAllocator::kSmallestSpaceSize;
 
-ObjectAllocator::ObjectAllocator(RawHeap* heap, PageBackend* page_backend,
-                                 StatsCollector* stats_collector,
-                                 PreFinalizerHandler* prefinalizer_handler)
+ObjectAllocator::ObjectAllocator(RawHeap& heap, PageBackend& page_backend,
+                                 StatsCollector& stats_collector,
+                                 PreFinalizerHandler& prefinalizer_handler)
     : raw_heap_(heap),
       page_backend_(page_backend),
       stats_collector_(stats_collector),
@@ -113,17 +113,17 @@ ObjectAllocator::ObjectAllocator(RawHeap* heap, PageBackend* page_backend,
 void* ObjectAllocator::OutOfLineAllocate(NormalPageSpace& space, size_t size,
                                          GCInfoIndex gcinfo) {
   void* memory = OutOfLineAllocateImpl(space, size, gcinfo);
-  stats_collector_->NotifySafePointForConservativeCollection();
-  raw_heap_->heap()->AdvanceIncrementalGarbageCollectionOnAllocationIfNeeded();
-  if (prefinalizer_handler_->IsInvokingPreFinalizers()) {
+  stats_collector_.NotifySafePointForConservativeCollection();
+  raw_heap_.heap()->AdvanceIncrementalGarbageCollectionOnAllocationIfNeeded();
+  if (prefinalizer_handler_.IsInvokingPreFinalizers()) {
     // Objects allocated during pre finalizers should be allocated as black
     // since marking is already done. Atomics are not needed because there is
     // no concurrent marking in the background.
     HeapObjectHeader::FromObject(memory).MarkNonAtomic();
     // Resetting the allocation buffer forces all further allocations in pre
     // finalizers to go through this slow path.
-    ReplaceLinearAllocationBuffer(space, *stats_collector_, nullptr, 0);
-    prefinalizer_handler_->NotifyAllocationInPrefinalizer(size);
+    ReplaceLinearAllocationBuffer(space, stats_collector_, nullptr, 0);
+    prefinalizer_handler_.NotifyAllocationInPrefinalizer(size);
   }
   return memory;
 }
@@ -137,8 +137,8 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space,
   // 1. If this allocation is big enough, allocate a large object.
   if (size >= kLargeObjectSizeThreshold) {
-    auto* large_space = &LargePageSpace::From(
-        *raw_heap_->Space(RawHeap::RegularSpaceType::kLarge));
+    auto& large_space = LargePageSpace::From(
+        *raw_heap_.Space(RawHeap::RegularSpaceType::kLarge));
     return AllocateLargeObject(page_backend_, large_space, stats_collector_,
                                size, gcinfo);
   }
@@ -150,7 +150,7 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space,
   // 3. Lazily sweep pages of this heap until we find a freed area for
   // this allocation or we finish sweeping all pages of this heap.
-  Sweeper& sweeper = raw_heap_->heap()->sweeper();
+  Sweeper& sweeper = raw_heap_.heap()->sweeper();
   // TODO(chromium:1056170): Investigate whether this should be a loop which
   // would result in more agressive re-use of memory at the expense of
   // potentially larger allocation time.
@@ -172,11 +172,11 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space,
   // TODO(chromium:1056170): Make use of the synchronously freed memory.
 
   // 5. Add a new page to this heap.
-  auto* new_page = NormalPage::Create(*page_backend_, space);
+  auto* new_page = NormalPage::Create(page_backend_, space);
   space.AddPage(new_page);
 
   // 6. Set linear allocation buffer to new page.
-  ReplaceLinearAllocationBuffer(space, *stats_collector_,
+  ReplaceLinearAllocationBuffer(space, stats_collector_,
                                 new_page->PayloadStart(),
                                 new_page->PayloadSize());
@@ -195,13 +195,12 @@ void* ObjectAllocator::AllocateFromFreeList(NormalPageSpace& space, size_t size,
   // Assume discarded memory on that page is now zero.
   auto& page = *NormalPage::From(BasePage::FromPayload(entry.address));
   if (page.discarded_memory()) {
-    stats_collector_->DecrementDiscardedMemory(page.discarded_memory());
+    stats_collector_.DecrementDiscardedMemory(page.discarded_memory());
     page.ResetDiscardedMemory();
   }
 
-  ReplaceLinearAllocationBuffer(space, *stats_collector_,
-                                static_cast<Address>(entry.address),
-                                entry.size);
+  ReplaceLinearAllocationBuffer(
+      space, stats_collector_, static_cast<Address>(entry.address), entry.size);
 
   return AllocateObjectOnSpace(space, size, gcinfo);
 }
@@ -209,20 +208,20 @@ void* ObjectAllocator::AllocateFromFreeList(NormalPageSpace& space, size_t size,
 void ObjectAllocator::ResetLinearAllocationBuffers() {
   class Resetter : public HeapVisitor<Resetter> {
    public:
-    explicit Resetter(StatsCollector* stats) : stats_collector_(stats) {}
+    explicit Resetter(StatsCollector& stats) : stats_collector_(stats) {}
 
     bool VisitLargePageSpace(LargePageSpace&) { return true; }
 
     bool VisitNormalPageSpace(NormalPageSpace& space) {
-      ReplaceLinearAllocationBuffer(space, *stats_collector_, nullptr, 0);
+      ReplaceLinearAllocationBuffer(space, stats_collector_, nullptr, 0);
       return true;
     }
 
    private:
-    StatsCollector* stats_collector_;
+    StatsCollector& stats_collector_;
   } visitor(stats_collector_);
 
-  visitor.Traverse(*raw_heap_);
+  visitor.Traverse(raw_heap_);
 }
 
 void ObjectAllocator::Terminate() {
@@ -230,7 +229,7 @@ void ObjectAllocator::Terminate() {
 }
 
 bool ObjectAllocator::in_disallow_gc_scope() const {
-  return raw_heap_->heap()->in_disallow_gc_scope();
+  return raw_heap_.heap()->in_disallow_gc_scope();
 }
 
 }  // namespace internal
...
@@ -38,9 +38,9 @@ class V8_EXPORT_PRIVATE ObjectAllocator final : public cppgc::AllocationHandle {
  public:
   static constexpr size_t kSmallestSpaceSize = 32;
 
-  ObjectAllocator(RawHeap* heap, PageBackend* page_backend,
-                  StatsCollector* stats_collector,
-                  PreFinalizerHandler* prefinalizer_handler);
+  ObjectAllocator(RawHeap& heap, PageBackend& page_backend,
+                  StatsCollector& stats_collector,
+                  PreFinalizerHandler& prefinalizer_handler);
 
   inline void* AllocateObject(size_t size, GCInfoIndex gcinfo);
   inline void* AllocateObject(size_t size, GCInfoIndex gcinfo,
@@ -65,10 +65,10 @@ class V8_EXPORT_PRIVATE ObjectAllocator final : public cppgc::AllocationHandle {
   void* OutOfLineAllocateImpl(NormalPageSpace&, size_t, GCInfoIndex);
   void* AllocateFromFreeList(NormalPageSpace&, size_t, GCInfoIndex);
 
-  RawHeap* raw_heap_;
-  PageBackend* page_backend_;
-  StatsCollector* stats_collector_;
-  PreFinalizerHandler* prefinalizer_handler_;
+  RawHeap& raw_heap_;
+  PageBackend& page_backend_;
+  StatsCollector& stats_collector_;
+  PreFinalizerHandler& prefinalizer_handler_;
 };
 
 void* ObjectAllocator::AllocateObject(size_t size, GCInfoIndex gcinfo) {
@@ -77,7 +77,7 @@ void* ObjectAllocator::AllocateObject(size_t size, GCInfoIndex gcinfo) {
       RoundUp<kAllocationGranularity>(size + sizeof(HeapObjectHeader));
   const RawHeap::RegularSpaceType type =
       GetInitialSpaceIndexForSize(allocation_size);
-  return AllocateObjectOnSpace(NormalPageSpace::From(*raw_heap_->Space(type)),
+  return AllocateObjectOnSpace(NormalPageSpace::From(*raw_heap_.Space(type)),
                                allocation_size, gcinfo);
 }
@@ -87,7 +87,7 @@ void* ObjectAllocator::AllocateObject(size_t size, GCInfoIndex gcinfo,
   const size_t allocation_size =
       RoundUp<kAllocationGranularity>(size + sizeof(HeapObjectHeader));
   return AllocateObjectOnSpace(
-      NormalPageSpace::From(*raw_heap_->CustomSpace(space_index)),
+      NormalPageSpace::From(*raw_heap_.CustomSpace(space_index)),
       allocation_size, gcinfo);
 }
...
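
A general C++ consequence of this pattern, not called out in the CL: reference members implicitly delete copy assignment and cannot be reseated, so ObjectAllocator becomes non-assignable once its fields are references. That is harmless here because the allocator is constructed once as a member of HeapBase and lives as long as the heap. A minimal sketch with a hypothetical Widget type:

struct Widget {};

class Holder {
 public:
  explicit Holder(Widget& w) : w_(w) {}
 private:
  Widget& w_;  // reference member: copy assignment is implicitly deleted
};

int main() {
  Widget a;
  Widget b;
  Holder h1(a);
  Holder h2(b);
  // h1 = h2;  // would not compile: Holder's copy assignment is deleted
}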