Commit f2ece54b authored by Anton Bikineev, committed by Commit Bot

cppgc: Introduce HeapVisitor

HeapVisitor is a simple visitor that implements both the visitation and the
accept (traversal) interfaces. It is intended to replace the functions that
traverse the heap in Blink: PrepareForSweep, Sweep, FinalizeSweep,
MakeConsistentForGC, ObjectPayloadSize, VerifyObjectStartBitmap,
CollectStatistics, etc.

This CL also introduces a Heap::ObjectPayloadSize function.
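
For illustration, a concrete visitor derives from HeapVisitor via CRTP and
overrides only the Visit* hooks it needs. A minimal sketch, modeled on the
ObjectSizeCounter added below in this CL (the name LiveObjectCounter and its
counting logic are illustrative, not part of the change):

class LiveObjectCounter : public HeapVisitor<LiveObjectCounter> {
  friend class HeapVisitor<LiveObjectCounter>;

 public:
  size_t CountLiveObjects(RawHeap* heap) {
    Traverse(heap);
    return count_;
  }

 private:
  // Called once per object header; returning true tells the traversal that
  // no deeper processing of this object is required.
  bool VisitHeapObjectHeader(HeapObjectHeader* header) {
    if (!header->IsFree()) ++count_;
    return true;
  }

  size_t count_ = 0;
};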

Bug: chromium:1056170
Change-Id: I85a2b8b572486bea29704fc436b002d8f7405201
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2165733
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#67458}
parent ec5149ef
@@ -4033,6 +4033,7 @@ v8_source_set("cppgc_base") {
"src/heap/cppgc/heap-page.h",
"src/heap/cppgc/heap-space.cc",
"src/heap/cppgc/heap-space.h",
"src/heap/cppgc/heap-visitor.h",
"src/heap/cppgc/heap.cc",
"src/heap/cppgc/heap.h",
"src/heap/cppgc/liveness-broker.cc",
......
@@ -80,6 +80,19 @@ NormalPage::NormalPage(Heap* heap, BaseSpace* space)
NormalPage::~NormalPage() = default;
NormalPage::iterator NormalPage::begin() {
const auto& lab = NormalPageSpace::From(space())->linear_allocation_buffer();
return iterator(reinterpret_cast<HeapObjectHeader*>(PayloadStart()),
lab.start(), lab.size());
}
NormalPage::const_iterator NormalPage::begin() const {
const auto& lab = NormalPageSpace::From(space())->linear_allocation_buffer();
return const_iterator(
reinterpret_cast<const HeapObjectHeader*>(PayloadStart()), lab.start(),
lab.size());
}
Address NormalPage::PayloadStart() {
return AlignAddress((reinterpret_cast<Address>(this + 1)),
kAllocationGranularity);
......
@@ -7,6 +7,7 @@
#include "src/base/iterator.h"
#include "src/base/macros.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
namespace cppgc {
@@ -49,7 +50,15 @@ class V8_EXPORT_PRIVATE NormalPage final : public BasePage {
template <typename T>
class IteratorImpl : v8::base::iterator<std::forward_iterator_tag, T> {
public:
explicit IteratorImpl(T* p) : p_(p) {}
explicit IteratorImpl(T* p, ConstAddress lab_start = nullptr,
size_t lab_size = 0)
: p_(p), lab_start_(lab_start), lab_size_(lab_size) {
DCHECK(p);
DCHECK_EQ(0, (lab_size & (sizeof(T) - 1)));
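// If iteration starts at the linear allocation buffer (LAB), skip over it:
// the LAB is unformatted memory that contains no object headers.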
if (reinterpret_cast<ConstAddress>(p_) == lab_start_) {
p_ += (lab_size_ / sizeof(T));
}
}
T& operator*() { return *p_; }
const T& operator*() const { return *p_; }
@@ -58,19 +67,26 @@ class V8_EXPORT_PRIVATE NormalPage final : public BasePage {
bool operator!=(IteratorImpl other) const { return !(*this == other); }
IteratorImpl& operator++() {
p_ += (p_->GetSize() / sizeof(T));
const size_t size = p_->GetSize();
DCHECK_EQ(0, (size & (sizeof(T) - 1)));
p_ += (size / sizeof(T));
if (reinterpret_cast<ConstAddress>(p_) == lab_start_) {
p_ += (lab_size_ / sizeof(T));
}
return *this;
}
IteratorImpl operator++(int) {
IteratorImpl temp(*this);
p_ += (p_->GetSize() / sizeof(T));
++(*this);
return temp;
}
T* base() { return p_; }
T* base() const { return p_; }
private:
T* p_;
ConstAddress lab_start_;
size_t lab_size_;
};
public:
@@ -83,13 +99,17 @@ class V8_EXPORT_PRIVATE NormalPage final : public BasePage {
// corresponding space (i.e. be swept when called).
static void Destroy(NormalPage*);
iterator begin() {
return iterator(reinterpret_cast<HeapObjectHeader*>(PayloadStart()));
static NormalPage* From(BasePage* page) {
DCHECK(!page->is_large());
return static_cast<NormalPage*>(page);
}
const_iterator begin() const {
return const_iterator(
reinterpret_cast<const HeapObjectHeader*>(PayloadStart()));
static const NormalPage* From(const BasePage* page) {
return From(const_cast<BasePage*>(page));
}
iterator begin();
const_iterator begin() const;
iterator end() {
return iterator(reinterpret_cast<HeapObjectHeader*>(PayloadEnd()));
}
@@ -118,6 +138,14 @@ class V8_EXPORT_PRIVATE LargePage final : public BasePage {
// corresponding space (i.e. be swept when called).
static void Destroy(LargePage*);
static LargePage* From(BasePage* page) {
DCHECK(page->is_large());
return static_cast<LargePage*>(page);
}
static const LargePage* From(const BasePage* page) {
return From(const_cast<BasePage*>(page));
}
HeapObjectHeader* ObjectHeader();
const HeapObjectHeader* ObjectHeader() const;
......
@@ -60,27 +60,35 @@ class V8_EXPORT_PRIVATE NormalPageSpace final : public BaseSpace {
public:
class LinearAllocationBuffer {
public:
void* Allocate(size_t alloc_size) {
Address Allocate(size_t alloc_size) {
DCHECK_GE(size_, alloc_size);
void* result = start_;
Address result = start_;
start_ += alloc_size;
size_ -= alloc_size;
return result;
}
void Set(void* ptr, size_t size) {
start_ = static_cast<uint8_t*>(ptr);
void Set(Address ptr, size_t size) {
start_ = ptr;
size_ = size;
}
void* start() const { return start_; }
Address start() const { return start_; }
size_t size() const { return size_; }
private:
uint8_t* start_ = nullptr;
Address start_ = nullptr;
size_t size_ = 0;
};
static NormalPageSpace* From(BaseSpace* space) {
DCHECK(!space->is_large());
return static_cast<NormalPageSpace*>(space);
}
static const NormalPageSpace* From(const BaseSpace* space) {
return From(const_cast<BaseSpace*>(space));
}
NormalPageSpace(RawHeap* heap, size_t index);
LinearAllocationBuffer& linear_allocation_buffer() { return current_lab_; }
@@ -98,6 +106,14 @@ class V8_EXPORT_PRIVATE NormalPageSpace final : public BaseSpace {
class V8_EXPORT_PRIVATE LargePageSpace final : public BaseSpace {
public:
static LargePageSpace* From(BaseSpace* space) {
DCHECK(space->is_large());
return static_cast<LargePageSpace*>(space);
}
static const LargePageSpace* From(const BaseSpace* space) {
return From(const_cast<BaseSpace*>(space));
}
LargePageSpace(RawHeap* heap, size_t index);
};
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_HEAP_VISITOR_H_
#define V8_HEAP_CPPGC_HEAP_VISITOR_H_
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/heap-space.h"
#include "src/heap/cppgc/raw-heap.h"
namespace cppgc {
namespace internal {
// Visitor for the heap, which also implements the accept (traverse)
// interface. Implements preorder traversal of the heap with a well-defined
// traversal order. Implemented as a CRTP visitor to avoid virtual calls and
// allow better inlining.
template <typename Derived>
class HeapVisitor {
public:
void Traverse(RawHeap* heap) {
if (VisitHeapImpl(heap)) return;
for (auto& space : *heap) {
Traverse(space.get());
}
}
void Traverse(BaseSpace* space) {
const bool is_stopped =
space->is_large()
? VisitLargePageSpaceImpl(LargePageSpace::From(space))
: VisitNormalPageSpaceImpl(NormalPageSpace::From(space));
if (is_stopped) return;
for (auto* page : *space) {
Traverse(page);
}
}
void Traverse(BasePage* page) {
if (page->is_large()) {
auto* large_page = LargePage::From(page);
if (VisitLargePageImpl(large_page)) return;
VisitHeapObjectHeaderImpl(large_page->ObjectHeader());
} else {
auto* normal_page = NormalPage::From(page);
if (VisitNormalPageImpl(normal_page)) return;
for (auto& header : *normal_page) {
VisitHeapObjectHeaderImpl(&header);
}
}
}
protected:
// Visitor functions return true if no deeper processing is required.
// Users are supposed to override functions that need special treatment.
bool VisitHeap(RawHeap*) { return false; }
bool VisitNormalPageSpace(NormalPageSpace*) { return false; }
bool VisitLargePageSpace(LargePageSpace*) { return false; }
bool VisitNormalPage(NormalPage*) { return false; }
bool VisitLargePage(LargePage*) { return false; }
bool VisitHeapObjectHeader(HeapObjectHeader*) { return false; }
private:
Derived& ToDerived() { return static_cast<Derived&>(*this); }
bool VisitHeapImpl(RawHeap* heap) { return ToDerived().VisitHeap(heap); }
bool VisitNormalPageSpaceImpl(NormalPageSpace* space) {
return ToDerived().VisitNormalPageSpace(space);
}
bool VisitLargePageSpaceImpl(LargePageSpace* space) {
return ToDerived().VisitLargePageSpace(space);
}
bool VisitNormalPageImpl(NormalPage* page) {
return ToDerived().VisitNormalPage(page);
}
bool VisitLargePageImpl(LargePage* page) {
return ToDerived().VisitLargePage(page);
}
bool VisitHeapObjectHeaderImpl(HeapObjectHeader* hoh) {
return ToDerived().VisitHeapObjectHeader(hoh);
}
};
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_HEAP_VISITOR_H_
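
The page- and space-level hooks can prune the traversal: returning true from
them skips everything beneath. A hypothetical sketch (PageCounter is
illustrative and not part of this CL):

class PageCounter : public HeapVisitor<PageCounter> {
  friend class HeapVisitor<PageCounter>;

 public:
  size_t CountPages(RawHeap* heap) {
    Traverse(heap);
    return page_count_;
  }

 private:
  // Returning true from the page-level hooks suppresses the per-object
  // visitation that Traverse(BasePage*) would otherwise perform.
  bool VisitNormalPage(NormalPage*) {
    ++page_count_;
    return true;
  }
  bool VisitLargePage(LargePage*) {
    ++page_count_;
    return true;
  }

  size_t page_count_ = 0;
};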
@@ -8,7 +8,9 @@
#include "src/base/platform/platform.h"
#include "src/heap/cppgc/heap-object-header-inl.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/heap-visitor.h"
#include "src/heap/cppgc/stack.h"
namespace cppgc {
@@ -25,6 +27,35 @@ constexpr bool NeedsConservativeStackScan(Heap::GCConfig config) {
return config.stack_state == Heap::GCConfig::StackState::kNonEmpty;
}
class ObjectSizeCounter : public HeapVisitor<ObjectSizeCounter> {
friend class HeapVisitor<ObjectSizeCounter>;
public:
size_t GetSize(RawHeap* heap) {
Traverse(heap);
return accumulated_size_;
}
private:
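// Computes the object's payload size: the full allocation size (read from
// the owning LargePage for large objects) minus the HeapObjectHeader.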
static size_t ObjectSize(const HeapObjectHeader* header) {
const size_t size =
header->IsLargeObject()
? static_cast<const LargePage*>(BasePage::FromPayload(header))
->PayloadSize()
: header->GetSize();
DCHECK_GE(size, sizeof(HeapObjectHeader));
return size - sizeof(HeapObjectHeader);
}
bool VisitHeapObjectHeader(HeapObjectHeader* header) {
if (header->IsFree()) return true;
accumulated_size_ += ObjectSize(header);
return true;
}
size_t accumulated_size_ = 0;
};
} // namespace
// static
@@ -93,6 +124,10 @@ void Heap::CollectGarbage(GCConfig config) {
}
}
size_t Heap::ObjectPayloadSize() const {
return ObjectSizeCounter().GetSize(const_cast<RawHeap*>(&raw_heap()));
}
Heap::NoGCScope::NoGCScope(Heap* heap) : heap_(heap) { heap_->no_gc_scope_++; }
Heap::NoGCScope::~NoGCScope() { heap_->no_gc_scope_--; }
......
@@ -98,6 +98,8 @@ class V8_EXPORT_PRIVATE Heap final : public cppgc::Heap {
PageBackend* page_backend() { return page_backend_.get(); }
const PageBackend* page_backend() const { return page_backend_.get(); }
size_t ObjectPayloadSize() const;
private:
bool in_no_gc_scope() const { return no_gc_scope_ > 0; }
bool is_allocation_allowed() const { return no_allocation_scope_ == 0; }
......
@@ -20,9 +20,8 @@ void* ObjectAllocator::AllocateObject(size_t size, GCInfoIndex gcinfo) {
const size_t allocation_size =
RoundUp(size + sizeof(HeapObjectHeader), kAllocationGranularity);
const RawHeap::SpaceType type = GetSpaceIndexForSize(allocation_size);
return AllocateObjectOnSpace(
static_cast<NormalPageSpace*>(raw_heap_->Space(type)), allocation_size,
gcinfo);
return AllocateObjectOnSpace(NormalPageSpace::From(raw_heap_->Space(type)),
allocation_size, gcinfo);
}
// static
......
@@ -46,8 +46,8 @@ void* ObjectAllocator::OutOfLineAllocate(NormalPageSpace* space, size_t size,
// 1. If this allocation is big enough, allocate a large object.
if (size >= kLargeObjectSizeThreshold) {
auto* large_space = static_cast<LargePageSpace*>(
raw_heap_->Space(RawHeap::SpaceType::kLarge));
auto* large_space =
LargePageSpace::From(raw_heap_->Space(RawHeap::SpaceType::kLarge));
return AllocateLargeObject(raw_heap_, large_space, size, gcinfo);
}
@@ -84,7 +84,7 @@ void* ObjectAllocator::AllocateFromFreeList(NormalPageSpace* space, size_t size,
space->free_list().Add({current_lab.start(), current_lab.size()});
}
current_lab.Set(entry.address, entry.size);
current_lab.Set(static_cast<Address>(entry.address), entry.size);
return AllocateObjectOnSpace(space, size, gcinfo);
}
......
@@ -3,7 +3,13 @@
// found in the LICENSE file.
#include "src/heap/cppgc/heap.h"
#include <algorithm>
#include <iterator>
#include <numeric>
#include "include/cppgc/allocation.h"
#include "src/heap/cppgc/globals.h"
#include "test/unittests/heap/cppgc/tests.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -34,6 +40,13 @@ class Foo : public GarbageCollected<Foo> {
size_t Foo::destructor_callcount;
template <size_t Size>
class GCed : public GarbageCollected<GCed<Size>> {
public:
void Visit(cppgc::Visitor*) {}
char buf[Size];
};
} // namespace
TEST_F(GCHeapTest, PreciseGCReclaimsObjectOnStack) {
@@ -57,7 +70,7 @@ const void* ConservativeGCReturningObject(cppgc::Heap* heap,
} // namespace
TEST_F(GCHeapTest, ConservaitveGCRetainsObjectOnStack) {
TEST_F(GCHeapTest, ConservativeGCRetainsObjectOnStack) {
Foo* volatile object = MakeGarbageCollected<Foo>(GetHeap());
EXPECT_EQ(0u, Foo::destructor_callcount);
EXPECT_EQ(object, ConservativeGCReturningObject(GetHeap(), object));
@@ -68,5 +81,35 @@ TEST_F(GCHeapTest, ConservaitveGCRetainsObjectOnStack) {
EXPECT_EQ(1u, Foo::destructor_callcount);
}
TEST_F(GCHeapTest, ObjectPayloadSize) {
static constexpr size_t kNumberOfObjectsPerArena = 16;
static constexpr size_t kObjectSizes[] = {1, 32, 64, 128,
2 * kLargeObjectSizeThreshold};
Heap::From(GetHeap())->CollectGarbage();
for (size_t k = 0; k < kNumberOfObjectsPerArena; ++k) {
MakeGarbageCollected<GCed<kObjectSizes[0]>>(GetHeap());
MakeGarbageCollected<GCed<kObjectSizes[1]>>(GetHeap());
MakeGarbageCollected<GCed<kObjectSizes[2]>>(GetHeap());
MakeGarbageCollected<GCed<kObjectSizes[3]>>(GetHeap());
MakeGarbageCollected<GCed<kObjectSizes[4]>>(GetHeap());
}
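// Expected payload per object: the allocation is rounded up to
// kAllocationGranularity and excludes the HeapObjectHeader; assuming the
// header size is itself a multiple of the granularity, this equals
// RoundUp(size, kAllocationGranularity).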
size_t aligned_object_sizes[arraysize(kObjectSizes)];
std::transform(std::cbegin(kObjectSizes), std::cend(kObjectSizes),
std::begin(aligned_object_sizes), [](size_t size) {
return RoundUp(size, kAllocationGranularity);
});
const size_t expected_size = std::accumulate(
std::cbegin(aligned_object_sizes), std::cend(aligned_object_sizes), 0u,
[](size_t acc, size_t size) {
return acc + kNumberOfObjectsPerArena * size;
});
// TODO(chromium:1056170): Change to EXPECT_EQ when proper sweeping is
// implemented.
EXPECT_LE(expected_size, Heap::From(GetHeap())->ObjectPayloadSize());
}
} // namespace internal
} // namespace cppgc