Commit e691014a authored by Omer Katz, committed by V8 LUCI CQ

cppgc: Use references instead of pointers for non-null fields

Replace non-null pointers in ObjectAllocator with references.

Bug: chromium:1056170
Change-Id: I55124610490bb903819f88a70b1f8e0fea4e430d
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3132969
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#76607}
parent 2a7bfabf
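
For illustration only (not part of the change itself): the pattern applied throughout this CL is to pass dependencies that must never be null as references and to store them as reference members, so the type encodes the invariant and call sites drop the * and -> indirection. Below is a minimal standalone sketch of that pattern with made-up names; StatsCollector and Allocator here are stand-ins, not the real cppgc types.

#include <cstddef>
#include <iostream>

// Stand-in for a collaborator the allocator depends on.
class StatsCollector {
 public:
  void NotifyAllocation(size_t size) { allocated_bytes_ += size; }
  size_t allocated_bytes() const { return allocated_bytes_; }

 private:
  size_t allocated_bytes_ = 0;
};

// Before: a StatsCollector* member forces every use site to assume (or check)
// that the pointer is non-null. After: a reference member states "always
// valid, set once" directly in the type.
class Allocator {
 public:
  explicit Allocator(StatsCollector& stats_collector)
      : stats_collector_(stats_collector) {}

  void* Allocate(size_t size) {
    stats_collector_.NotifyAllocation(size);  // no null check, no ->
    return ::operator new(size);
  }

 private:
  StatsCollector& stats_collector_;  // never null, never reseated
};

int main() {
  StatsCollector stats;
  Allocator allocator(stats);  // the dependency must outlive the allocator
  void* object = allocator.Allocate(64);
  std::cout << stats.allocated_bytes() << " bytes allocated\n";
  ::operator delete(object);
  return 0;
}

The trade-off is that reference members make the holder non-copy-assignable and require the referenced objects to outlive it. That fits ObjectAllocator, whose collaborators are constructed and owned by HeapBase for the heap's lifetime (see the HeapBase constructor hunk below).
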
@@ -75,8 +75,8 @@ HeapBase::HeapBase(
           v8::base::Stack::GetStackStart())),
       prefinalizer_handler_(std::make_unique<PreFinalizerHandler>(*this)),
       compactor_(raw_heap_),
-      object_allocator_(&raw_heap_, page_backend_.get(), stats_collector_.get(),
-                        prefinalizer_handler_.get()),
+      object_allocator_(raw_heap_, *page_backend_, *stats_collector_,
+                        *prefinalizer_handler_),
       sweeper_(*this),
       stack_support_(stack_support) {
   stats_collector_->RegisterObserver(
@@ -83,16 +83,16 @@ void ReplaceLinearAllocationBuffer(NormalPageSpace& space,
   }
 }
 
-void* AllocateLargeObject(PageBackend* page_backend, LargePageSpace* space,
-                          StatsCollector* stats_collector, size_t size,
+void* AllocateLargeObject(PageBackend& page_backend, LargePageSpace& space,
+                          StatsCollector& stats_collector, size_t size,
                           GCInfoIndex gcinfo) {
-  LargePage* page = LargePage::Create(*page_backend, *space, size);
-  space->AddPage(page);
+  LargePage* page = LargePage::Create(page_backend, space, size);
+  space.AddPage(page);
   auto* header = new (page->ObjectHeader())
       HeapObjectHeader(HeapObjectHeader::kLargeObjectSizeInHeader, gcinfo);
 
-  stats_collector->NotifyAllocation(size);
+  stats_collector.NotifyAllocation(size);
   MarkRangeAsYoung(page, page->PayloadStart(), page->PayloadEnd());
 
   return header->ObjectStart();
@@ -102,9 +102,9 @@ void* AllocateLargeObject(PageBackend* page_backend, LargePageSpace* space,
 
 constexpr size_t ObjectAllocator::kSmallestSpaceSize;
 
-ObjectAllocator::ObjectAllocator(RawHeap* heap, PageBackend* page_backend,
-                                 StatsCollector* stats_collector,
-                                 PreFinalizerHandler* prefinalizer_handler)
+ObjectAllocator::ObjectAllocator(RawHeap& heap, PageBackend& page_backend,
+                                 StatsCollector& stats_collector,
+                                 PreFinalizerHandler& prefinalizer_handler)
     : raw_heap_(heap),
       page_backend_(page_backend),
       stats_collector_(stats_collector),
@@ -113,17 +113,17 @@ ObjectAllocator::ObjectAllocator(RawHeap* heap, PageBackend* page_backend,
 void* ObjectAllocator::OutOfLineAllocate(NormalPageSpace& space, size_t size,
                                          GCInfoIndex gcinfo) {
   void* memory = OutOfLineAllocateImpl(space, size, gcinfo);
-  stats_collector_->NotifySafePointForConservativeCollection();
-  raw_heap_->heap()->AdvanceIncrementalGarbageCollectionOnAllocationIfNeeded();
-  if (prefinalizer_handler_->IsInvokingPreFinalizers()) {
+  stats_collector_.NotifySafePointForConservativeCollection();
+  raw_heap_.heap()->AdvanceIncrementalGarbageCollectionOnAllocationIfNeeded();
+  if (prefinalizer_handler_.IsInvokingPreFinalizers()) {
     // Objects allocated during pre finalizers should be allocated as black
     // since marking is already done. Atomics are not needed because there is
     // no concurrent marking in the background.
     HeapObjectHeader::FromObject(memory).MarkNonAtomic();
     // Resetting the allocation buffer forces all further allocations in pre
     // finalizers to go through this slow path.
-    ReplaceLinearAllocationBuffer(space, *stats_collector_, nullptr, 0);
-    prefinalizer_handler_->NotifyAllocationInPrefinalizer(size);
+    ReplaceLinearAllocationBuffer(space, stats_collector_, nullptr, 0);
+    prefinalizer_handler_.NotifyAllocationInPrefinalizer(size);
   }
   return memory;
 }
@@ -137,8 +137,8 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space,
 
   // 1. If this allocation is big enough, allocate a large object.
   if (size >= kLargeObjectSizeThreshold) {
-    auto* large_space = &LargePageSpace::From(
-        *raw_heap_->Space(RawHeap::RegularSpaceType::kLarge));
+    auto& large_space = LargePageSpace::From(
+        *raw_heap_.Space(RawHeap::RegularSpaceType::kLarge));
     return AllocateLargeObject(page_backend_, large_space, stats_collector_,
                                size, gcinfo);
   }
@@ -150,7 +150,7 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space,
 
   // 3. Lazily sweep pages of this heap until we find a freed area for
   // this allocation or we finish sweeping all pages of this heap.
-  Sweeper& sweeper = raw_heap_->heap()->sweeper();
+  Sweeper& sweeper = raw_heap_.heap()->sweeper();
   // TODO(chromium:1056170): Investigate whether this should be a loop which
   // would result in more agressive re-use of memory at the expense of
   // potentially larger allocation time.
@@ -172,11 +172,11 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space,
 
   // TODO(chromium:1056170): Make use of the synchronously freed memory.
 
   // 5. Add a new page to this heap.
-  auto* new_page = NormalPage::Create(*page_backend_, space);
+  auto* new_page = NormalPage::Create(page_backend_, space);
   space.AddPage(new_page);
 
   // 6. Set linear allocation buffer to new page.
-  ReplaceLinearAllocationBuffer(space, *stats_collector_,
+  ReplaceLinearAllocationBuffer(space, stats_collector_,
                                 new_page->PayloadStart(),
                                 new_page->PayloadSize());
@@ -195,13 +195,12 @@ void* ObjectAllocator::AllocateFromFreeList(NormalPageSpace& space, size_t size,
 
   // Assume discarded memory on that page is now zero.
   auto& page = *NormalPage::From(BasePage::FromPayload(entry.address));
   if (page.discarded_memory()) {
-    stats_collector_->DecrementDiscardedMemory(page.discarded_memory());
+    stats_collector_.DecrementDiscardedMemory(page.discarded_memory());
     page.ResetDiscardedMemory();
   }
-  ReplaceLinearAllocationBuffer(space, *stats_collector_,
-                                static_cast<Address>(entry.address),
-                                entry.size);
+  ReplaceLinearAllocationBuffer(
+      space, stats_collector_, static_cast<Address>(entry.address), entry.size);
 
   return AllocateObjectOnSpace(space, size, gcinfo);
 }
@@ -209,20 +208,20 @@ void* ObjectAllocator::AllocateFromFreeList(NormalPageSpace& space, size_t size,
 
 void ObjectAllocator::ResetLinearAllocationBuffers() {
   class Resetter : public HeapVisitor<Resetter> {
    public:
-    explicit Resetter(StatsCollector* stats) : stats_collector_(stats) {}
+    explicit Resetter(StatsCollector& stats) : stats_collector_(stats) {}
 
     bool VisitLargePageSpace(LargePageSpace&) { return true; }
 
     bool VisitNormalPageSpace(NormalPageSpace& space) {
-      ReplaceLinearAllocationBuffer(space, *stats_collector_, nullptr, 0);
+      ReplaceLinearAllocationBuffer(space, stats_collector_, nullptr, 0);
       return true;
     }
 
    private:
-    StatsCollector* stats_collector_;
+    StatsCollector& stats_collector_;
   } visitor(stats_collector_);
-  visitor.Traverse(*raw_heap_);
+  visitor.Traverse(raw_heap_);
 }
 
 void ObjectAllocator::Terminate() {
@@ -230,7 +229,7 @@ void ObjectAllocator::Terminate() {
 }
 
 bool ObjectAllocator::in_disallow_gc_scope() const {
-  return raw_heap_->heap()->in_disallow_gc_scope();
+  return raw_heap_.heap()->in_disallow_gc_scope();
 }
 
 }  // namespace internal
@@ -38,9 +38,9 @@ class V8_EXPORT_PRIVATE ObjectAllocator final : public cppgc::AllocationHandle {
  public:
   static constexpr size_t kSmallestSpaceSize = 32;
 
-  ObjectAllocator(RawHeap* heap, PageBackend* page_backend,
-                  StatsCollector* stats_collector,
-                  PreFinalizerHandler* prefinalizer_handler);
+  ObjectAllocator(RawHeap& heap, PageBackend& page_backend,
+                  StatsCollector& stats_collector,
+                  PreFinalizerHandler& prefinalizer_handler);
 
   inline void* AllocateObject(size_t size, GCInfoIndex gcinfo);
   inline void* AllocateObject(size_t size, GCInfoIndex gcinfo,
@@ -65,10 +65,10 @@ class V8_EXPORT_PRIVATE ObjectAllocator final : public cppgc::AllocationHandle {
   void* OutOfLineAllocateImpl(NormalPageSpace&, size_t, GCInfoIndex);
   void* AllocateFromFreeList(NormalPageSpace&, size_t, GCInfoIndex);
 
-  RawHeap* raw_heap_;
-  PageBackend* page_backend_;
-  StatsCollector* stats_collector_;
-  PreFinalizerHandler* prefinalizer_handler_;
+  RawHeap& raw_heap_;
+  PageBackend& page_backend_;
+  StatsCollector& stats_collector_;
+  PreFinalizerHandler& prefinalizer_handler_;
 };
 
 void* ObjectAllocator::AllocateObject(size_t size, GCInfoIndex gcinfo) {
@@ -77,7 +77,7 @@ void* ObjectAllocator::AllocateObject(size_t size, GCInfoIndex gcinfo) {
       RoundUp<kAllocationGranularity>(size + sizeof(HeapObjectHeader));
   const RawHeap::RegularSpaceType type =
       GetInitialSpaceIndexForSize(allocation_size);
-  return AllocateObjectOnSpace(NormalPageSpace::From(*raw_heap_->Space(type)),
+  return AllocateObjectOnSpace(NormalPageSpace::From(*raw_heap_.Space(type)),
                                allocation_size, gcinfo);
 }
@@ -87,7 +87,7 @@ void* ObjectAllocator::AllocateObject(size_t size, GCInfoIndex gcinfo,
   const size_t allocation_size =
       RoundUp<kAllocationGranularity>(size + sizeof(HeapObjectHeader));
   return AllocateObjectOnSpace(
-      NormalPageSpace::From(*raw_heap_->CustomSpace(space_index)),
+      NormalPageSpace::From(*raw_heap_.CustomSpace(space_index)),
       allocation_size, gcinfo);
 }