Commit f19e2e68 authored by Michael Lippautz, committed by V8 LUCI CQ

cppgc: Use reference instead of pointers in HeapVisitor

Bug: v8:11822
Change-Id: I35f3b5ce71ab5f86a5d9991bb9d729a2fe56f6dd
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2919955
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74840}
parent 52d65418
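
In short: the HeapVisitor Traverse() entry points and the Visit* hooks now take references instead of pointers, encoding that none of these arguments may be null. The pattern, taken from the hunks below:

-  bool VisitHeapObjectHeader(HeapObjectHeader* header) {
-    if (header->IsFree()) return true;
+  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
+    if (header.IsFree()) return true;

Call sites change accordingly: Traverse(&heap) becomes Traverse(heap), and code that still holds a pointer dereferences at the call boundary, e.g. Traverse(*page).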
@@ -477,9 +477,9 @@ class LiveObjectsForVisibilityIterator final
       : graph_builder_(graph_builder) {}

  private:
-  bool VisitHeapObjectHeader(HeapObjectHeader* header) {
-    if (header->IsFree()) return true;
-    graph_builder_.VisitForVisibility(nullptr, *header);
+  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
+    if (header.IsFree()) return true;
+    graph_builder_.VisitForVisibility(nullptr, header);
     graph_builder_.ProcessPendingObjects();
     return true;
   }
@@ -686,7 +686,7 @@ void CppGraphBuilderImpl::Run() {
   // First pass: Figure out which objects should be included in the graph -- see
   // class-level comment on CppGraphBuilder.
   LiveObjectsForVisibilityIterator visitor(*this);
-  visitor.Traverse(&cpp_heap_.raw_heap());
+  visitor.Traverse(cpp_heap_.raw_heap());
   // Second pass: Add graph nodes for objects that must be shown.
   states_.ForAllVisibleStates([this](StateBase* state) {
     ParentScope parent_scope(*state);
...
@@ -374,7 +374,7 @@ void CompactSpace(NormalPageSpace* space,
   using Pages = NormalPageSpace::Pages;

 #ifdef V8_USE_ADDRESS_SANITIZER
-  UnmarkedObjectsPoisoner().Traverse(space);
+  UnmarkedObjectsPoisoner().Traverse(*space);
 #endif  // V8_USE_ADDRESS_SANITIZER

   DCHECK(space->is_compactable());
...
@@ -29,18 +29,18 @@ class ObjectSizeCounter : private HeapVisitor<ObjectSizeCounter> {
   friend class HeapVisitor<ObjectSizeCounter>;

  public:
-  size_t GetSize(RawHeap* heap) {
+  size_t GetSize(RawHeap& heap) {
     Traverse(heap);
     return accumulated_size_;
   }

  private:
-  static size_t ObjectSize(const HeapObjectHeader* header) {
-    return ObjectView(*header).Size();
+  static size_t ObjectSize(const HeapObjectHeader& header) {
+    return ObjectView(header).Size();
   }

-  bool VisitHeapObjectHeader(HeapObjectHeader* header) {
-    if (header->IsFree()) return true;
+  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
+    if (header.IsFree()) return true;
     accumulated_size_ += ObjectSize(header);
     return true;
   }
@@ -90,7 +90,7 @@ PageAllocator* HeapBase::page_allocator() const {
 }

 size_t HeapBase::ObjectPayloadSize() const {
-  return ObjectSizeCounter().GetSize(const_cast<RawHeap*>(&raw_heap()));
+  return ObjectSizeCounter().GetSize(const_cast<RawHeap&>(raw_heap()));
 }

 void HeapBase::AdvanceIncrementalGarbageCollectionOnAllocationIfNeeded() {
...
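Worth noting in the hunk above: ObjectPayloadSize() is a const member while GetSize() now takes a mutable RawHeap&, so the const_cast does not go away; it only changes form, from const_cast<RawHeap*>(&raw_heap()) to const_cast<RawHeap&>(raw_heap()).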
@@ -90,7 +90,7 @@ HeapStatistics HeapStatisticsCollector::CollectStatistics(HeapBase* heap) {
   stats.detail_level = HeapStatistics::DetailLevel::kDetailed;
   current_stats_ = &stats;

-  Traverse(&heap->raw_heap());
+  Traverse(heap->raw_heap());
   FinalizeSpace(current_stats_, &current_space_stats_, &current_page_stats_);

   DCHECK_EQ(heap->stats_collector()->allocated_memory_size(),
@@ -98,20 +98,20 @@ HeapStatistics HeapStatisticsCollector::CollectStatistics(HeapBase* heap) {
   return stats;
 }

-bool HeapStatisticsCollector::VisitNormalPageSpace(NormalPageSpace* space) {
-  DCHECK_EQ(0u, space->linear_allocation_buffer().size());
+bool HeapStatisticsCollector::VisitNormalPageSpace(NormalPageSpace& space) {
+  DCHECK_EQ(0u, space.linear_allocation_buffer().size());

   FinalizeSpace(current_stats_, &current_space_stats_, &current_page_stats_);

   current_space_stats_ =
-      InitializeSpace(current_stats_, GetNormalPageSpaceName(space->index()));
+      InitializeSpace(current_stats_, GetNormalPageSpaceName(space.index()));

-  space->free_list().CollectStatistics(current_space_stats_->free_list_stats);
+  space.free_list().CollectStatistics(current_space_stats_->free_list_stats);

   return false;
 }

-bool HeapStatisticsCollector::VisitLargePageSpace(LargePageSpace* space) {
+bool HeapStatisticsCollector::VisitLargePageSpace(LargePageSpace& space) {
   FinalizeSpace(current_stats_, &current_space_stats_, &current_page_stats_);

   current_space_stats_ = InitializeSpace(current_stats_, "LargePageSpace");
@@ -119,7 +119,7 @@ bool HeapStatisticsCollector::VisitLargePageSpace(LargePageSpace* space) {
   return false;
 }

-bool HeapStatisticsCollector::VisitNormalPage(NormalPage* page) {
+bool HeapStatisticsCollector::VisitNormalPage(NormalPage& page) {
   DCHECK_NOT_NULL(current_space_stats_);
   FinalizePage(current_space_stats_, &current_page_stats_);
   current_space_stats_->page_stats.emplace_back(
@@ -128,11 +128,11 @@ bool HeapStatisticsCollector::VisitNormalPage(NormalPage* page) {
   return false;
 }

-bool HeapStatisticsCollector::VisitLargePage(LargePage* page) {
+bool HeapStatisticsCollector::VisitLargePage(LargePage& page) {
   DCHECK_NOT_NULL(current_space_stats_);
   FinalizePage(current_space_stats_, &current_page_stats_);
-  HeapObjectHeader* object_header = page->ObjectHeader();
-  size_t object_size = page->PayloadSize();
+  HeapObjectHeader* object_header = page.ObjectHeader();
+  size_t object_size = page.PayloadSize();
   RecordObjectType(current_space_stats_, object_header, object_size);
   size_t allocated_size = LargePage::AllocationSize(object_size);
   current_space_stats_->physical_size_bytes += allocated_size;
@@ -143,13 +143,13 @@ bool HeapStatisticsCollector::VisitLargePage(LargePage* page) {
   return true;
 }

-bool HeapStatisticsCollector::VisitHeapObjectHeader(HeapObjectHeader* header) {
-  DCHECK(!header->IsLargeObject());
+bool HeapStatisticsCollector::VisitHeapObjectHeader(HeapObjectHeader& header) {
+  DCHECK(!header.IsLargeObject());
   DCHECK_NOT_NULL(current_space_stats_);
   DCHECK_NOT_NULL(current_page_stats_);
-  if (header->IsFree()) return true;
-  size_t object_size = header->AllocatedSize();
-  RecordObjectType(current_space_stats_, header, object_size);
+  if (header.IsFree()) return true;
+  size_t object_size = header.AllocatedSize();
+  RecordObjectType(current_space_stats_, &header, object_size);
   current_page_stats_->used_size_bytes += object_size;
   return true;
 }
...
@@ -18,11 +18,11 @@ class HeapStatisticsCollector : private HeapVisitor<HeapStatisticsCollector> {
   HeapStatistics CollectStatistics(HeapBase*);

  private:
-  bool VisitNormalPageSpace(NormalPageSpace*);
-  bool VisitLargePageSpace(LargePageSpace*);
-  bool VisitNormalPage(NormalPage*);
-  bool VisitLargePage(LargePage*);
-  bool VisitHeapObjectHeader(HeapObjectHeader*);
+  bool VisitNormalPageSpace(NormalPageSpace&);
+  bool VisitLargePageSpace(LargePageSpace&);
+  bool VisitNormalPage(NormalPage&);
+  bool VisitLargePage(LargePage&);
+  bool VisitHeapObjectHeader(HeapObjectHeader&);

   HeapStatistics* current_stats_;
   HeapStatistics::SpaceStatistics* current_space_stats_ = nullptr;
...
@@ -19,34 +19,34 @@ namespace internal {
 template <typename Derived>
 class HeapVisitor {
  public:
-  void Traverse(RawHeap* heap) {
+  void Traverse(RawHeap& heap) {
     if (VisitHeapImpl(heap)) return;
-    for (auto& space : *heap) {
-      Traverse(space.get());
+    for (auto& space : heap) {
+      Traverse(*space.get());
     }
   }

-  void Traverse(BaseSpace* space) {
+  void Traverse(BaseSpace& space) {
     const bool is_stopped =
-        space->is_large()
-            ? VisitLargePageSpaceImpl(&LargePageSpace::From(*space))
-            : VisitNormalPageSpaceImpl(&NormalPageSpace::From(*space));
+        space.is_large()
+            ? VisitLargePageSpaceImpl(LargePageSpace::From(space))
+            : VisitNormalPageSpaceImpl(NormalPageSpace::From(space));
     if (is_stopped) return;
-    for (auto* page : *space) {
-      Traverse(page);
+    for (auto* page : space) {
+      Traverse(*page);
     }
   }

-  void Traverse(BasePage* page) {
-    if (page->is_large()) {
-      auto* large_page = LargePage::From(page);
-      if (VisitLargePageImpl(large_page)) return;
-      VisitHeapObjectHeaderImpl(large_page->ObjectHeader());
+  void Traverse(BasePage& page) {
+    if (page.is_large()) {
+      auto* large_page = LargePage::From(&page);
+      if (VisitLargePageImpl(*large_page)) return;
+      VisitHeapObjectHeaderImpl(*large_page->ObjectHeader());
     } else {
-      auto* normal_page = NormalPage::From(page);
-      if (VisitNormalPageImpl(normal_page)) return;
+      auto* normal_page = NormalPage::From(&page);
+      if (VisitNormalPageImpl(*normal_page)) return;
       for (auto& header : *normal_page) {
-        VisitHeapObjectHeaderImpl(&header);
+        VisitHeapObjectHeaderImpl(header);
       }
     }
   }
@@ -54,31 +54,31 @@ class HeapVisitor {
  protected:
   // Visitor functions return true if no deeper processing is required.
   // Users are supposed to override functions that need special treatment.
-  bool VisitHeap(RawHeap*) { return false; }
-  bool VisitNormalPageSpace(NormalPageSpace*) { return false; }
-  bool VisitLargePageSpace(LargePageSpace*) { return false; }
-  bool VisitNormalPage(NormalPage*) { return false; }
-  bool VisitLargePage(LargePage*) { return false; }
-  bool VisitHeapObjectHeader(HeapObjectHeader*) { return false; }
+  bool VisitHeap(RawHeap&) { return false; }
+  bool VisitNormalPageSpace(NormalPageSpace&) { return false; }
+  bool VisitLargePageSpace(LargePageSpace&) { return false; }
+  bool VisitNormalPage(NormalPage&) { return false; }
+  bool VisitLargePage(LargePage&) { return false; }
+  bool VisitHeapObjectHeader(HeapObjectHeader&) { return false; }

  private:
   Derived& ToDerived() { return static_cast<Derived&>(*this); }

-  bool VisitHeapImpl(RawHeap* heap) { return ToDerived().VisitHeap(heap); }
-  bool VisitNormalPageSpaceImpl(NormalPageSpace* space) {
+  bool VisitHeapImpl(RawHeap& heap) { return ToDerived().VisitHeap(heap); }
+  bool VisitNormalPageSpaceImpl(NormalPageSpace& space) {
     return ToDerived().VisitNormalPageSpace(space);
   }
-  bool VisitLargePageSpaceImpl(LargePageSpace* space) {
+  bool VisitLargePageSpaceImpl(LargePageSpace& space) {
     return ToDerived().VisitLargePageSpace(space);
   }
-  bool VisitNormalPageImpl(NormalPage* page) {
+  bool VisitNormalPageImpl(NormalPage& page) {
     return ToDerived().VisitNormalPage(page);
   }
-  bool VisitLargePageImpl(LargePage* page) {
+  bool VisitLargePageImpl(LargePage& page) {
     return ToDerived().VisitLargePage(page);
   }
-  bool VisitHeapObjectHeaderImpl(HeapObjectHeader* hoh) {
-    return ToDerived().VisitHeapObjectHeader(hoh);
+  bool VisitHeapObjectHeaderImpl(HeapObjectHeader& header) {
+    return ToDerived().VisitHeapObjectHeader(header);
   }
 };
...
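For orientation, here is how a derived visitor looks against the new reference-based API in src/heap/cppgc/heap-visitor.h (included above). This is a hypothetical sketch following the ObjectSizeCounter and Unmarker patterns elsewhere in this CL; MarkedObjectCounter itself is not part of the CL:

class MarkedObjectCounter final : private HeapVisitor<MarkedObjectCounter> {
  // The CRTP base dispatches to the private hooks below.
  friend class HeapVisitor<MarkedObjectCounter>;

 public:
  size_t Count(RawHeap& heap) {
    Traverse(heap);  // Reference parameter: the heap is never null here.
    return count_;
  }

 private:
  // Returning true means no deeper processing is required for this object.
  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
    if (!header.IsFree() && header.IsMarked()) ++count_;
    return true;
  }

  size_t count_ = 0;
};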
@@ -62,11 +62,11 @@ class Unmarker final : private HeapVisitor<Unmarker> {
   friend class HeapVisitor<Unmarker>;

  public:
-  explicit Unmarker(RawHeap* heap) { Traverse(heap); }
+  explicit Unmarker(RawHeap& heap) { Traverse(heap); }

  private:
-  bool VisitHeapObjectHeader(HeapObjectHeader* header) {
-    if (header->IsMarked()) header->Unmark();
+  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
+    if (header.IsMarked()) header.Unmark();
     return true;
   }
 };
@@ -157,7 +157,7 @@ void Heap::StartGarbageCollection(Config config) {

 #if defined(CPPGC_YOUNG_GENERATION)
   if (config.collection_type == Config::CollectionType::kMajor)
-    Unmarker unmarker(&raw_heap());
+    Unmarker unmarker(raw_heap());
 #endif

   const Marker::MarkingConfig marking_config{
...
@@ -24,7 +24,7 @@ MarkingVerifierBase::MarkingVerifierBase(
 void MarkingVerifierBase::Run(Heap::Config::StackState stack_state,
                               uintptr_t stack_end,
                               size_t expected_marked_bytes) {
-  Traverse(&heap_.raw_heap());
+  Traverse(heap_.raw_heap());
   if (stack_state == Heap::Config::StackState::kMayContainHeapPointers) {
     in_construction_objects_ = &in_construction_objects_stack_;
     heap_.stack()->IteratePointersUnsafe(this, stack_end);
@@ -87,22 +87,22 @@ void MarkingVerifierBase::VisitPointer(const void* address) {
   TraceConservativelyIfNeeded(address);
 }

-bool MarkingVerifierBase::VisitHeapObjectHeader(HeapObjectHeader* header) {
+bool MarkingVerifierBase::VisitHeapObjectHeader(HeapObjectHeader& header) {
   // Verify only non-free marked objects.
-  if (!header->IsMarked()) return true;
-  DCHECK(!header->IsFree());
+  if (!header.IsMarked()) return true;
+  DCHECK(!header.IsFree());

-  verification_state_.SetCurrentParent(header);
+  verification_state_.SetCurrentParent(&header);

-  if (!header->IsInConstruction()) {
-    header->Trace(visitor_.get());
+  if (!header.IsInConstruction()) {
+    header.Trace(visitor_.get());
   } else {
     // Dispatches to conservative tracing implementation.
-    TraceConservativelyIfNeeded(*header);
+    TraceConservativelyIfNeeded(header);
   }

-  found_marked_bytes_ += ObjectView(*header).Size() + sizeof(HeapObjectHeader);
+  found_marked_bytes_ += ObjectView(header).Size() + sizeof(HeapObjectHeader);

   verification_state_.SetCurrentParent(nullptr);
...
@@ -51,7 +51,7 @@ class V8_EXPORT_PRIVATE MarkingVerifierBase
                             TraceConservativelyCallback) final;
   void VisitPointer(const void*) final;

-  bool VisitHeapObjectHeader(HeapObjectHeader*);
+  bool VisitHeapObjectHeader(HeapObjectHeader&);

   VerificationState& verification_state_;
   std::unique_ptr<cppgc::Visitor> visitor_;
...
@@ -188,10 +188,10 @@ void ObjectAllocator::ResetLinearAllocationBuffers() {
    public:
     explicit Resetter(StatsCollector* stats) : stats_collector_(stats) {}

-    bool VisitLargePageSpace(LargePageSpace*) { return true; }
+    bool VisitLargePageSpace(LargePageSpace&) { return true; }

-    bool VisitNormalPageSpace(NormalPageSpace* space) {
-      ReplaceLinearAllocationBuffer(*space, *stats_collector_, nullptr, 0);
+    bool VisitNormalPageSpace(NormalPageSpace& space) {
+      ReplaceLinearAllocationBuffer(space, *stats_collector_, nullptr, 0);
       return true;
     }
@@ -199,7 +199,7 @@ void ObjectAllocator::ResetLinearAllocationBuffers() {
     StatsCollector* stats_collector_;
   } visitor(stats_collector_);

-  visitor.Traverse(raw_heap_);
+  visitor.Traverse(*raw_heap_);
 }

 void ObjectAllocator::Terminate() {
...
@@ -9,6 +9,7 @@
 #include "src/heap/cppgc/heap-object-header.h"
 #include "src/heap/cppgc/heap-page.h"
 #include "src/heap/cppgc/heap-visitor.h"
+#include "src/heap/cppgc/object-view.h"

 namespace cppgc {
 namespace internal {
@@ -20,14 +21,10 @@ class UnmarkedObjectsPoisoner : public HeapVisitor<UnmarkedObjectsPoisoner> {
   friend class HeapVisitor<UnmarkedObjectsPoisoner>;

  private:
-  bool VisitHeapObjectHeader(HeapObjectHeader* header) {
-    if (header->IsFree() || header->IsMarked()) return true;
-    const size_t size =
-        header->IsLargeObject()
-            ? LargePage::From(BasePage::FromPayload(header))->ObjectSize()
-            : header->ObjectSize();
-    ASAN_POISON_MEMORY_REGION(header->ObjectStart(), size);
+  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
+    if (header.IsFree() || header.IsMarked()) return true;
+    ASAN_POISON_MEMORY_REGION(header.ObjectStart(), ObjectView(header).Size());
     return true;
   }
 };
...
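The second hunk above also folds a hand-rolled size computation into ObjectView(header).Size() from the newly included src/heap/cppgc/object-view.h. Reconstructed from the deleted lines, the old branch computed the following (a sketch of the removed logic, not of the object-view.h implementation):

size_t UnmarkedObjectSize(HeapObjectHeader& header) {
  // Large objects track their size on the owning page; normal objects in the header.
  return header.IsLargeObject()
             ? LargePage::From(BasePage::FromPayload(&header))->ObjectSize()
             : header.ObjectSize();
}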
@@ -37,25 +37,25 @@ class ObjectStartBitmapVerifier
   friend class HeapVisitor<ObjectStartBitmapVerifier>;

  public:
-  void Verify(RawHeap* heap) { Traverse(heap); }
+  void Verify(RawHeap& heap) { Traverse(heap); }

  private:
-  bool VisitNormalPage(NormalPage* page) {
+  bool VisitNormalPage(NormalPage& page) {
     // Remember bitmap and reset previous pointer.
-    bitmap_ = &page->object_start_bitmap();
+    bitmap_ = &page.object_start_bitmap();
     prev_ = nullptr;
     return false;
   }

-  bool VisitHeapObjectHeader(HeapObjectHeader* header) {
-    if (header->IsLargeObject()) return true;
+  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
+    if (header.IsLargeObject()) return true;

-    auto* raw_header = reinterpret_cast<ConstAddress>(header);
+    auto* raw_header = reinterpret_cast<ConstAddress>(&header);
     CHECK(bitmap_->CheckBit(raw_header));
     if (prev_) {
       CHECK_EQ(prev_, bitmap_->FindHeader(raw_header - 1));
     }
-    prev_ = header;
+    prev_ = &header;
     return true;
   }
@@ -337,12 +337,12 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> {
   void Sweep() {
     for (SpaceState& state : *states_) {
       while (auto page = state.unswept_pages.Pop()) {
-        SweepPage(*page);
+        SweepPage(**page);
       }
     }
   }

-  void SweepPage(BasePage* page) { Traverse(page); }
+  void SweepPage(BasePage& page) { Traverse(page); }

   bool SweepWithDeadline(double deadline_in_seconds) {
     DCHECK(platform_);
@@ -378,7 +378,7 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> {
     static constexpr size_t kDeadlineCheckInterval = 8;
     size_t page_count = 1;
     while (auto page = state->unswept_pages.Pop()) {
-      Traverse(*page);
+      Traverse(**page);
       if (page_count % kDeadlineCheckInterval == 0 &&
           deadline_in_seconds <= platform_->MonotonicallyIncreasingTime()) {
         return false;
@@ -389,27 +389,27 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> {
     return true;
   }

-  bool VisitNormalPage(NormalPage* page) {
+  bool VisitNormalPage(NormalPage& page) {
     const InlinedFinalizationBuilder::ResultType result =
-        SweepNormalPage<InlinedFinalizationBuilder>(page);
+        SweepNormalPage<InlinedFinalizationBuilder>(&page);
     if (result.is_empty) {
-      NormalPage::Destroy(page);
+      NormalPage::Destroy(&page);
     } else {
-      page->space().AddPage(page);
+      page.space().AddPage(&page);
       largest_new_free_list_entry_ = std::max(
           result.largest_new_free_list_entry, largest_new_free_list_entry_);
     }
     return true;
   }

-  bool VisitLargePage(LargePage* page) {
-    HeapObjectHeader* header = page->ObjectHeader();
+  bool VisitLargePage(LargePage& page) {
+    HeapObjectHeader* header = page.ObjectHeader();
     if (header->IsMarked()) {
       StickyUnmark(header);
-      page->space().AddPage(page);
+      page.space().AddPage(&page);
     } else {
       header->Finalize();
-      LargePage::Destroy(page);
+      LargePage::Destroy(&page);
     }
     return true;
   }
@@ -433,7 +433,7 @@ class ConcurrentSweepTask final : public cppgc::JobTask,
     for (SpaceState& state : *states_) {
       while (auto page = state.unswept_pages.Pop()) {
-        Traverse(*page);
+        Traverse(**page);
         if (delegate->ShouldYield()) return;
       }
     }
@@ -445,32 +445,32 @@ class ConcurrentSweepTask final : public cppgc::JobTask,
   }

  private:
-  bool VisitNormalPage(NormalPage* page) {
+  bool VisitNormalPage(NormalPage& page) {
     SpaceState::SweptPageState sweep_result =
-        SweepNormalPage<DeferredFinalizationBuilder>(page);
-    const size_t space_index = page->space().index();
+        SweepNormalPage<DeferredFinalizationBuilder>(&page);
+    const size_t space_index = page.space().index();
     DCHECK_GT(states_->size(), space_index);
     SpaceState& space_state = (*states_)[space_index];
     space_state.swept_unfinalized_pages.Push(std::move(sweep_result));
     return true;
   }

-  bool VisitLargePage(LargePage* page) {
-    HeapObjectHeader* header = page->ObjectHeader();
+  bool VisitLargePage(LargePage& page) {
+    HeapObjectHeader* header = page.ObjectHeader();
     if (header->IsMarked()) {
       StickyUnmark(header);
-      page->space().AddPage(page);
+      page.space().AddPage(&page);
       return true;
     }
     if (!header->IsFinalizable()) {
-      LargePage::Destroy(page);
+      LargePage::Destroy(&page);
       return true;
     }
-    const size_t space_index = page->space().index();
+    const size_t space_index = page.space().index();
     DCHECK_GT(states_->size(), space_index);
     SpaceState& state = (*states_)[space_index];
     state.swept_unfinalized_pages.Push(
-        {page, {page->ObjectHeader()}, {}, {}, true});
+        {&page, {page.ObjectHeader()}, {}, {}, true});
     return true;
   }
@@ -493,12 +493,12 @@ class PrepareForSweepVisitor final
       : states_(states),
         compactable_space_handling_(compactable_space_handling) {}

-  bool VisitNormalPageSpace(NormalPageSpace* space) {
+  bool VisitNormalPageSpace(NormalPageSpace& space) {
     if ((compactable_space_handling_ == CompactableSpaceHandling::kIgnore) &&
-        space->is_compactable())
+        space.is_compactable())
       return true;
-    DCHECK(!space->linear_allocation_buffer().size());
-    space->free_list().Clear();
+    DCHECK(!space.linear_allocation_buffer().size());
+    space.free_list().Clear();
 #ifdef V8_USE_ADDRESS_SANITIZER
     UnmarkedObjectsPoisoner().Traverse(space);
 #endif  // V8_USE_ADDRESS_SANITIZER
@@ -506,7 +506,7 @@ class PrepareForSweepVisitor final
     return true;
   }

-  bool VisitLargePageSpace(LargePageSpace* space) {
+  bool VisitLargePageSpace(LargePageSpace& space) {
 #ifdef V8_USE_ADDRESS_SANITIZER
     UnmarkedObjectsPoisoner().Traverse(space);
 #endif  // V8_USE_ADDRESS_SANITIZER
@@ -515,10 +515,10 @@ class PrepareForSweepVisitor final
   }

  private:
-  void ExtractPages(BaseSpace* space) {
-    BaseSpace::Pages space_pages = space->RemoveAllPages();
-    (*states_)[space->index()].unswept_pages.Insert(space_pages.begin(),
-                                                    space_pages.end());
+  void ExtractPages(BaseSpace& space) {
+    BaseSpace::Pages space_pages = space.RemoveAllPages();
+    (*states_)[space.index()].unswept_pages.Insert(space_pages.begin(),
+                                                   space_pages.end());
   }

   SpaceStates* states_;
@@ -543,10 +543,10 @@ class Sweeper::SweeperImpl final {
     platform_ = platform;
 #if DEBUG
     // Verify bitmap for all spaces regardless of |compactable_space_handling|.
-    ObjectStartBitmapVerifier().Verify(&heap_);
+    ObjectStartBitmapVerifier().Verify(heap_);
 #endif
     PrepareForSweepVisitor(&space_states_, config.compactable_space_handling)
-        .Traverse(&heap_);
+        .Traverse(heap_);

     if (config.sweeping_type == SweepingConfig::SweepingType::kAtomic) {
       Finish();
@@ -587,7 +587,7 @@ class Sweeper::SweeperImpl final {
       // unswept page. This also helps out the concurrent sweeper.
       MutatorThreadSweeper sweeper(&space_states_, platform_);
       while (auto page = space_state.unswept_pages.Pop()) {
-        sweeper.SweepPage(*page);
+        sweeper.SweepPage(**page);
         if (size <= sweeper.largest_new_free_list_entry()) return true;
       }
     }
...
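One pattern in the sweeper hunks deserves a note: state.unswept_pages.Pop() evidently yields an optional-like holder of a BasePage*, so the pointer-taking SweepPage(*page) becomes SweepPage(**page) under the reference API: one dereference for the holder, one for the page. This is an inference from the call sites; the CL itself does not spell out the container type.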
@@ -200,18 +200,18 @@ class ObjectSizeCounter final : private HeapVisitor<ObjectSizeCounter> {
   friend class HeapVisitor<ObjectSizeCounter>;

  public:
-  size_t GetSize(RawHeap* heap) {
+  size_t GetSize(RawHeap& heap) {
     Traverse(heap);
     return accumulated_size_;
   }

  private:
-  static size_t ObjectSize(const HeapObjectHeader* header) {
-    return ObjectView(*header).Size();
+  static size_t ObjectSize(const HeapObjectHeader& header) {
+    return ObjectView(header).Size();
   }

-  bool VisitHeapObjectHeader(HeapObjectHeader* header) {
-    if (header->IsFree()) return true;
+  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
+    if (header.IsFree()) return true;
     accumulated_size_ += ObjectSize(header);
     return true;
   }
@@ -226,7 +226,7 @@ TEST_F(WorkloadsTest, BasicFunctionality) {
                 "Allocation granularity is expected to be a multiple of 4");
   Heap* heap = internal::Heap::From(GetHeap());
   size_t initial_object_payload_size =
-      ObjectSizeCounter().GetSize(&heap->raw_heap());
+      ObjectSizeCounter().GetSize(heap->raw_heap());
   {
     // When the test starts there may already have been leaked some memory
     // on the heap, so we establish a base line.
@@ -248,7 +248,7 @@ TEST_F(WorkloadsTest, BasicFunctionality) {
     size_t total = 96;

     EXPECT_EQ(base_level + total,
-              ObjectSizeCounter().GetSize(&heap->raw_heap()));
+              ObjectSizeCounter().GetSize(heap->raw_heap()));
     if (test_pages_allocated) {
       EXPECT_EQ(kPageSize * 2,
                 heap->stats_collector()->allocated_memory_size());
@@ -269,7 +269,7 @@ TEST_F(WorkloadsTest, BasicFunctionality) {
   PreciseGC();

   size_t total = 0;
-  size_t base_level = ObjectSizeCounter().GetSize(&heap->raw_heap());
+  size_t base_level = ObjectSizeCounter().GetSize(heap->raw_heap());
   bool test_pages_allocated = !base_level;
   if (test_pages_allocated) {
     EXPECT_EQ(0ul, heap->stats_collector()->allocated_memory_size());
@@ -292,7 +292,7 @@ TEST_F(WorkloadsTest, BasicFunctionality) {
   // The allocations in the loop may trigger GC with lazy sweeping.
   heap->sweeper().FinishIfRunning();
   EXPECT_EQ(base_level + total,
-            ObjectSizeCounter().GetSize(&heap->raw_heap()));
+            ObjectSizeCounter().GetSize(heap->raw_heap()));
   if (test_pages_allocated) {
     EXPECT_EQ(0ul, heap->stats_collector()->allocated_memory_size() &
                        (kPageSize - 1));
@@ -310,7 +310,7 @@ TEST_F(WorkloadsTest, BasicFunctionality) {
   total += 96;

   EXPECT_EQ(base_level + total,
-            ObjectSizeCounter().GetSize(&heap->raw_heap()));
+            ObjectSizeCounter().GetSize(heap->raw_heap()));
   if (test_pages_allocated) {
     EXPECT_EQ(0ul, heap->stats_collector()->allocated_memory_size() &
                        (kPageSize - 1));
@@ -329,13 +329,13 @@ TEST_F(WorkloadsTest, BasicFunctionality) {
   PreciseGC();
   total -= big;
-  EXPECT_EQ(base_level + total, ObjectSizeCounter().GetSize(&heap->raw_heap()));
+  EXPECT_EQ(base_level + total, ObjectSizeCounter().GetSize(heap->raw_heap()));
   if (test_pages_allocated) {
     EXPECT_EQ(0ul, heap->stats_collector()->allocated_memory_size() &
                        (kPageSize - 1));
   }

-  EXPECT_EQ(base_level + total, ObjectSizeCounter().GetSize(&heap->raw_heap()));
+  EXPECT_EQ(base_level + total, ObjectSizeCounter().GetSize(heap->raw_heap()));
   if (test_pages_allocated) {
     EXPECT_EQ(0ul, heap->stats_collector()->allocated_memory_size() &
                        (kPageSize - 1));
...