Commit 22f80fc1 authored by Anton Bikineev, committed by Commit Bot

cppgc: Introduce heap object structure classes

This adds the following:
1) Heap object structure classes: RawHeap, BaseSpace and BasePage.
   Normal page spaces keep their allocation state in:
   - a freelist
   - a linear allocation buffer
2) ObjectAllocator, a class responsible for object (and page) allocation.

The design doc with UML design: https://bit.ly/2VVTcqc

User-defined spaces are a follow-up.

Bug: chromium:1056170
Change-Id: I69a82974bd08e3cf3da90041b1628297cc890891
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2167392
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#67425}
parent 451854fa
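For orientation, the ownership structure introduced by this change (per the design doc above) can be sketched as follows. This diagram is an editorial summary of the classes defined in the diff below, not code from the change:

// Heap                      (embedder-facing heap, include/cppgc/heap.h)
//  └─ RawHeap               (space management, src/heap/cppgc/raw-heap.h)
//      └─ BaseSpace x9      (4 normal size classes + 1 large + 4 reserved
//                            user-defined slots; only the first 5 are
//                            instantiated so far, see raw-heap.cc)
//          └─ BasePage      (NormalPage or LargePage, heap-page.h)
//              └─ HeapObjectHeader + object payload
//
// Any payload pointer can be mapped back up the chain:
//   BasePage::FromPayload(object)->space()->raw_heap()->heap();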
@@ -4029,10 +4029,15 @@ v8_source_set("cppgc_base") {
"src/heap/cppgc/heap-object-header.h",
"src/heap/cppgc/heap-page.cc",
"src/heap/cppgc/heap-page.h",
"src/heap/cppgc/heap-space.cc",
"src/heap/cppgc/heap-space.h",
"src/heap/cppgc/heap.cc",
"src/heap/cppgc/heap.h",
"src/heap/cppgc/liveness-broker.cc",
"src/heap/cppgc/logging.cc",
"src/heap/cppgc/object-allocator-inl.h",
"src/heap/cppgc/object-allocator.cc",
"src/heap/cppgc/object-allocator.h",
"src/heap/cppgc/page-memory-inl.h",
"src/heap/cppgc/page-memory.cc",
"src/heap/cppgc/page-memory.h",
@@ -4041,6 +4046,8 @@ v8_source_set("cppgc_base") {
"src/heap/cppgc/pointer-policies.cc",
"src/heap/cppgc/prefinalizer-handler.cc",
"src/heap/cppgc/prefinalizer-handler.h",
"src/heap/cppgc/raw-heap.cc",
"src/heap/cppgc/raw-heap.h",
"src/heap/cppgc/sanitizers.h",
"src/heap/cppgc/source-location.cc",
"src/heap/cppgc/stack.cc",
......
@@ -16,6 +16,25 @@ class Heap;
class V8_EXPORT Heap {
public:
// Normal spaces are used to store objects of different size classes:
// - kNormal1: < 32 bytes
// - kNormal2: < 64 bytes
// - kNormal3: < 128 bytes
// - kNormal4: >= 128 bytes
// Objects of size greater than 2^16 get stored in the large space. Users can
// register up to 4 arenas for application specific needs.
enum class SpaceType {
kNormal1,
kNormal2,
kNormal3,
kNormal4,
kLarge,
kUserDefined1,
kUserDefined2,
kUserDefined3,
kUserDefined4,
};
static std::unique_ptr<Heap> Create();
virtual ~Heap() = default;
......
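For illustration, the routing implemented by ObjectAllocator::GetSpaceIndexForSize (further down in this diff) keys on the rounded allocation size, i.e. payload plus HeapObjectHeader. Assuming an 8-byte header and 16-byte allocation granularity (illustrative values, not asserted by this change):

// payload   8 ->   8 + 8 header, rounded to  16 -> < 32   -> kNormal1
// payload  40 ->  40 + 8 header, rounded to  48 -> < 64   -> kNormal2
// payload 100 -> 100 + 8 header, rounded to 112 -> < 128  -> kNormal3
// payload 256 -> 256 + 8 header, rounded to 272 -> >= 128 -> kNormal4
// rounded size >= kLargeObjectSizeThreshold              -> kLarge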
@@ -15,7 +15,8 @@ namespace internal {
 inline cppgc::Heap* GetHeapFromPayload(const void* payload) {
   return *reinterpret_cast<cppgc::Heap**>(
-      (reinterpret_cast<uintptr_t>(payload) & api_constants::kPageBaseMask) +
+      ((reinterpret_cast<uintptr_t>(payload) & api_constants::kPageBaseMask) +
+       api_constants::kGuardPageSize) +
       api_constants::kHeapOffset);
 }
......
@@ -29,6 +29,7 @@ static constexpr size_t kFullyConstructedBitMask = size_t{1};
static constexpr size_t kPageSize = size_t{1} << 17;
static constexpr size_t kPageAlignment = kPageSize;
static constexpr size_t kPageBaseMask = ~(kPageAlignment - 1);
static constexpr size_t kGuardPageSize = 4096;
// Offset of the Heap backref.
static constexpr size_t kHeapOffset = 0;
......
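Taken together, these constants imply the following page layout and back-mapping; the concrete addresses below are made up for illustration:

// Normal page reservation (kPageSize = 128 KiB, aligned to kPageAlignment):
//   [ guard page (4096) | BasePage header ... | payload ... | guard page ]
//
// GetHeapFromPayload(payload) with payload = 0x7f0000025408:
//   payload & kPageBaseMask  -> 0x7f0000020000  (128 KiB page base)
//   + kGuardPageSize         -> 0x7f0000021000  (start of the page header)
//   + kHeapOffset (0)        -> location of the Heap* backref
// BasePage::FromPayload() in heap-page.cc performs the same computation.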
@@ -155,6 +155,18 @@ bool FreeList::IsEmpty() const {
[](const auto* entry) { return !entry; });
}
bool FreeList::Contains(Block block) const {
for (Entry* list : free_list_heads_) {
for (Entry* entry = list; entry; entry = entry->Next()) {
if (entry <= block.address &&
(reinterpret_cast<Address>(block.address) + block.size <=
reinterpret_cast<Address>(entry) + entry->GetSize()))
return true;
}
}
return false;
}
bool FreeList::IsConsistent(size_t index) const {
// Check that freelist head and tail pointers are consistent, i.e.
// - either both are nulls (no entries in the bucket);
......
@@ -43,6 +43,8 @@ class V8_EXPORT_PRIVATE FreeList {
size_t Size() const;
bool IsEmpty() const;
bool Contains(Block) const;
private:
class Entry;
......
@@ -8,38 +8,16 @@
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header-inl.h"
#include "src/heap/cppgc/object-allocator-inl.h"
namespace cppgc {
namespace internal {
 void* Heap::Allocate(size_t size, GCInfoIndex index) {
   DCHECK(is_allocation_allowed());
-  // TODO(chromium:1056170): This is merely a dummy implementation and will be
-  // replaced with proper allocation code throughout the migration.
-  size_t allocation_size = size + sizeof(HeapObjectHeader);
-  // The allocation size calculation can overflow for large sizes.
-  CHECK_GT(allocation_size, size);
-  // calloc() provides stricter alignment guarantees than the GC. Allocate
-  // a multiple of kAllocationGranularity to follow restrictions of
-  // HeapObjectHeader.
-  allocation_size = (allocation_size + kAllocationMask) & ~kAllocationMask;
-  void* memory = allocator_->Allocate(allocation_size);
-  HeapObjectHeader* header =
-      new (memory) HeapObjectHeader(allocation_size, index);
-  objects_.push_back(header);
-  return header->Payload();
-}
-
-void* Heap::BasicAllocator::Allocate(size_t size) {
-  // Can only allocate normal-sized objects.
-  CHECK_GT(kLargeObjectSizeThreshold, size);
-  if (current_ == nullptr || (current_ + size) > limit_) {
-    GetNewPage();
-  }
-  void* memory = current_;
-  current_ += size;
-  return memory;
+  void* result = object_allocator_.AllocateObject(size, index);
+  objects_.push_back(&HeapObjectHeader::FromPayload(result));
+  return result;
 }
} // namespace internal
......
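Heap::Allocate is what the public MakeGarbageCollected allocation function bottoms out in, so the dummy BasicAllocator path above is replaced by ObjectAllocator end to end. A minimal usage sketch (the GCed class is hypothetical; heap is a cppgc::Heap obtained from Heap::Create()):

class GCed : public cppgc::GarbageCollected<GCed> {
 public:
  void Trace(cppgc::Visitor*) const {}
};

// MakeGarbageCollected -> Heap::Allocate -> ObjectAllocator::AllocateObject,
// which rounds the size, picks a space, and bumps its linear allocation
// buffer (see object-allocator-inl.h below).
auto* object = cppgc::MakeGarbageCollected<GCed>(heap.get());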
@@ -117,6 +117,11 @@ bool HeapObjectHeader::IsFree() const {
return GetGCInfoIndex() == kFreeListGCInfoIndex;
}
bool HeapObjectHeader::IsFinalizable() const {
const GCInfo& gc_info = GlobalGCInfoTable::GCInfoFromIndex(GetGCInfoIndex());
return gc_info.finalize;
}
template <HeapObjectHeader::AccessMode mode, HeapObjectHeader::EncodedHalf part,
std::memory_order memory_order>
uint16_t HeapObjectHeader::LoadEncoded() const {
......
@@ -12,6 +12,8 @@
namespace cppgc {
namespace internal {
STATIC_ASSERT((kAllocationGranularity % sizeof(HeapObjectHeader)) == 0);
void HeapObjectHeader::CheckApiConstants() {
STATIC_ASSERT(api_constants::kFullyConstructedBitMask ==
FullyConstructedField::kMask);
......
@@ -47,6 +47,7 @@ class HeapObjectHeader {
   static constexpr size_t kSizeLog2 = 17;
   static constexpr size_t kMaxSize = (size_t{1} << kSizeLog2) - 1;
+  static constexpr uint16_t kLargeObjectSizeInHeader = 0;
 
   inline static HeapObjectHeader& FromPayload(void* address);
   inline static const HeapObjectHeader& FromPayload(const void* address);
@@ -80,13 +81,12 @@
   template <AccessMode = AccessMode::kNonAtomic>
   bool IsFree() const;
+  inline bool IsFinalizable() const;
 
   void Finalize();
 
  private:
   enum class EncodedHalf : uint8_t { kLow, kHigh };
-  static constexpr uint16_t kLargeObjectSizeInHeader = 0;
 
   // Used in |encoded_high_|.
   using FullyConstructedField = v8::base::BitField16<bool, 0, 1>;
   using UnusedField1 = FullyConstructedField::Next<bool, 1>;
......
@@ -3,8 +3,14 @@
// found in the LICENSE file.
#include "src/heap/cppgc/heap-page.h"
#include "include/cppgc/internal/api-constants.h"
#include "src/base/logging.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-space.h"
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/raw-heap.h"
namespace cppgc {
namespace internal {
@@ -20,39 +26,135 @@ Address AlignAddress(Address address, size_t alignment) {
STATIC_ASSERT(kPageSize == api_constants::kPageAlignment);
-BasePage::BasePage(Heap* heap) : heap_(heap) {
-  DCHECK_EQ(0u, reinterpret_cast<uintptr_t>(this) & kPageOffsetMask);
-  DCHECK_EQ(reinterpret_cast<void*>(&heap_),
-            FromPayload(this) + api_constants::kHeapOffset);
-}
-
 // static
+BasePage* BasePage::FromPayload(void* payload) {
+  return reinterpret_cast<BasePage*>(
+      (reinterpret_cast<uintptr_t>(payload) & kPageBaseMask) + kGuardPageSize);
+}
+
+// static
-BasePage* BasePage::FromPayload(const void* payload) {
-  return reinterpret_cast<BasePage*>(
-      reinterpret_cast<uintptr_t>(const_cast<void*>(payload)) & kPageBaseMask);
+const BasePage* BasePage::FromPayload(const void* payload) {
+  return reinterpret_cast<const BasePage*>(
+      (reinterpret_cast<uintptr_t>(const_cast<void*>(payload)) &
+       kPageBaseMask) +
+      kGuardPageSize);
 }
 
+BasePage::BasePage(Heap* heap, BaseSpace* space, PageType type)
+    : heap_(heap), space_(space), type_(type) {
+  DCHECK_EQ(0u, (reinterpret_cast<uintptr_t>(this) - kGuardPageSize) &
+                    kPageOffsetMask);
+  DCHECK_EQ(reinterpret_cast<void*>(&heap_),
+            FromPayload(this) + api_constants::kHeapOffset);
+  DCHECK_EQ(&heap_->raw_heap(), space_->raw_heap());
+}
+
 // static
-NormalPage* NormalPage::Create(Heap* heap) {
-  Address reservation = reinterpret_cast<Address>(calloc(1, 2 * kPageSize));
-  return new (AlignAddress(reservation, kPageSize))
-      NormalPage(heap, reservation);
+NormalPage* NormalPage::Create(NormalPageSpace* space) {
+  DCHECK(space);
+  Heap* heap = space->raw_heap()->heap();
+  void* memory = heap->page_backend()->AllocateNormalPageMemory(space->index());
+  auto* normal_page = new (memory) NormalPage(heap, space);
+  space->AddPage(normal_page);
+  space->free_list().Add(
+      {normal_page->PayloadStart(), normal_page->PayloadSize()});
+  return normal_page;
 }
 
 // static
 void NormalPage::Destroy(NormalPage* page) {
-  Address reservation = page->reservation_;
+  DCHECK(page);
+  BaseSpace* space = page->space();
+  DCHECK_EQ(space->end(), std::find(space->begin(), space->end(), page));
   page->~NormalPage();
-  free(reservation);
+  PageBackend* backend = page->heap()->page_backend();
+  backend->FreeNormalPageMemory(space->index(),
+                                reinterpret_cast<Address>(page));
 }
 
-NormalPage::NormalPage(Heap* heap, Address reservation)
-    : BasePage(heap),
-      reservation_(reservation),
-      payload_start_(AlignAddress(reinterpret_cast<Address>(this + 1),
-                                  kAllocationGranularity)),
-      payload_end_(reinterpret_cast<Address>(this) + kPageSize) {
-  DCHECK_GT(PayloadEnd() - PayloadStart(), kLargeObjectSizeThreshold);
+NormalPage::NormalPage(Heap* heap, BaseSpace* space)
+    : BasePage(heap, space, PageType::kNormal) {
+  DCHECK_LT(kLargeObjectSizeThreshold,
+            static_cast<size_t>(PayloadEnd() - PayloadStart()));
 }
NormalPage::~NormalPage() = default;
Address NormalPage::PayloadStart() {
return AlignAddress((reinterpret_cast<Address>(this + 1)),
kAllocationGranularity);
}
ConstAddress NormalPage::PayloadStart() const {
return const_cast<NormalPage*>(this)->PayloadStart();
}
Address NormalPage::PayloadEnd() { return PayloadStart() + PayloadSize(); }
ConstAddress NormalPage::PayloadEnd() const {
return const_cast<NormalPage*>(this)->PayloadEnd();
}
// static
size_t NormalPage::PayloadSize() {
const size_t header_size =
RoundUp(sizeof(NormalPage), kAllocationGranularity);
return kPageSize - 2 * kGuardPageSize - header_size;
}
LargePage::LargePage(Heap* heap, BaseSpace* space, size_t size)
: BasePage(heap, space, PageType::kLarge), payload_size_(size) {}
LargePage::~LargePage() = default;
// static
LargePage* LargePage::Create(LargePageSpace* space, size_t size) {
DCHECK(space);
DCHECK_LE(kLargeObjectSizeThreshold, size);
const size_t page_header_size =
RoundUp(sizeof(LargePage), kAllocationGranularity);
const size_t allocation_size = page_header_size + size;
Heap* heap = space->raw_heap()->heap();
void* memory = heap->page_backend()->AllocateLargePageMemory(allocation_size);
LargePage* page = new (memory) LargePage(heap, space, size);
space->AddPage(page);
return page;
}
// static
void LargePage::Destroy(LargePage* page) {
DCHECK(page);
#if DEBUG
BaseSpace* space = page->space();
DCHECK_EQ(space->end(), std::find(space->begin(), space->end(), page));
#endif
page->~LargePage();
PageBackend* backend = page->heap()->page_backend();
backend->FreeLargePageMemory(reinterpret_cast<Address>(page));
}
HeapObjectHeader* LargePage::ObjectHeader() {
return reinterpret_cast<HeapObjectHeader*>(PayloadStart());
}
const HeapObjectHeader* LargePage::ObjectHeader() const {
return reinterpret_cast<const HeapObjectHeader*>(PayloadStart());
}
Address LargePage::PayloadStart() {
return AlignAddress((reinterpret_cast<Address>(this + 1)),
kAllocationGranularity);
}
ConstAddress LargePage::PayloadStart() const {
return const_cast<LargePage*>(this)->PayloadStart();
}
Address LargePage::PayloadEnd() { return PayloadStart() + PayloadSize(); }
ConstAddress LargePage::PayloadEnd() const {
return const_cast<LargePage*>(this)->PayloadEnd();
}
} // namespace internal
......
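A worked computation for NormalPage::PayloadSize() above, assuming RoundUp(sizeof(NormalPage), kAllocationGranularity) comes out at 32 bytes (the exact header size is an assumption):

//   kPageSize            131072   (1 << 17)
// - 2 * kGuardPageSize    -8192   (front and back guard pages)
// - rounded page header     -32   (assumed)
// ------------------------------
//   payload              122848   bytes usable per normal page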
@@ -5,37 +5,134 @@
#ifndef V8_HEAP_CPPGC_HEAP_PAGE_H_
#define V8_HEAP_CPPGC_HEAP_PAGE_H_
#include "src/base/iterator.h"
#include "src/base/macros.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/heap-object-header.h"
namespace cppgc {
namespace internal {
class BaseSpace;
class NormalPageSpace;
class LargePageSpace;
class Heap;
class PageBackend;
class V8_EXPORT_PRIVATE BasePage {
public:
-  static BasePage* FromPayload(const void*);
+  static BasePage* FromPayload(void*);
+  static const BasePage* FromPayload(const void*);
BasePage(const BasePage&) = delete;
BasePage& operator=(const BasePage&) = delete;
Heap* heap() { return heap_; }
const Heap* heap() const { return heap_; }
BaseSpace* space() { return space_; }
const BaseSpace* space() const { return space_; }
void set_space(BaseSpace* space) { space_ = space; }
bool is_large() const { return type_ == PageType::kLarge; }
protected:
-  explicit BasePage(Heap* heap);
+  enum class PageType { kNormal, kLarge };
+  BasePage(Heap*, BaseSpace*, PageType);
private:
Heap* heap_;
BaseSpace* space_;
PageType type_;
};
class V8_EXPORT_PRIVATE NormalPage final : public BasePage {
template <typename T>
class IteratorImpl : v8::base::iterator<std::forward_iterator_tag, T> {
public:
explicit IteratorImpl(T* p) : p_(p) {}
T& operator*() { return *p_; }
const T& operator*() const { return *p_; }
bool operator==(IteratorImpl other) const { return p_ == other.p_; }
bool operator!=(IteratorImpl other) const { return !(*this == other); }
IteratorImpl& operator++() {
p_ += (p_->GetSize() / sizeof(T));
return *this;
}
IteratorImpl operator++(int) {
IteratorImpl temp(*this);
p_ += (p_->GetSize() / sizeof(T));
return temp;
}
T* base() { return p_; }
private:
T* p_;
};
public:
-  static NormalPage* Create(Heap* heap);
+  using iterator = IteratorImpl<HeapObjectHeader>;
+  using const_iterator = IteratorImpl<const HeapObjectHeader>;
+
+  // Allocates a new page.
+  static NormalPage* Create(NormalPageSpace*);
+  // Destroys and frees the page. The page must be detached from the
+  // corresponding space (i.e. be swept when called).
   static void Destroy(NormalPage*);
 
-  Address PayloadStart() const { return payload_start_; }
-  Address PayloadEnd() const { return payload_end_; }
+  iterator begin() {
+    return iterator(reinterpret_cast<HeapObjectHeader*>(PayloadStart()));
+  }
+  const_iterator begin() const {
+    return const_iterator(
+        reinterpret_cast<const HeapObjectHeader*>(PayloadStart()));
+  }
+  iterator end() {
+    return iterator(reinterpret_cast<HeapObjectHeader*>(PayloadEnd()));
+  }
+  const_iterator end() const {
+    return const_iterator(
+        reinterpret_cast<const HeapObjectHeader*>(PayloadEnd()));
+  }
+
+  Address PayloadStart();
+  ConstAddress PayloadStart() const;
+  Address PayloadEnd();
+  ConstAddress PayloadEnd() const;
+
+  static size_t PayloadSize();
private:
NormalPage(Heap* heap, BaseSpace* space);
~NormalPage();
};
class V8_EXPORT_PRIVATE LargePage final : public BasePage {
public:
// Allocates a new page.
static LargePage* Create(LargePageSpace*, size_t);
// Destroys and frees the page. The page must be detached from the
// corresponding space (i.e. be swept when called).
static void Destroy(LargePage*);
HeapObjectHeader* ObjectHeader();
const HeapObjectHeader* ObjectHeader() const;
Address PayloadStart();
ConstAddress PayloadStart() const;
Address PayloadEnd();
ConstAddress PayloadEnd() const;
size_t PayloadSize() const { return payload_size_; }
  private:
-  explicit NormalPage(Heap* heap, Address reservation);
+  LargePage(Heap* heap, BaseSpace* space, size_t);
+  ~LargePage();
 
-  Address reservation_;
-  Address payload_start_;
-  Address payload_end_;
+  size_t payload_size_;
 };
} // namespace internal
......
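The IteratorImpl above advances by each header's encoded size, so a NormalPage can be walked object by object. A hypothetical usage sketch (not part of this change):

size_t LiveBytesOnPage(NormalPage* page) {
  size_t live = 0;
  for (HeapObjectHeader& header : *page) {
    // Freelist entries are headers with kFreeListGCInfoIndex; skip them.
    if (!header.IsFree()) live += header.GetSize();
  }
  return live;
}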
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/heap-space.h"
#include <algorithm>
#include "src/base/logging.h"
#include "src/heap/cppgc/raw-heap.h"
namespace cppgc {
namespace internal {
BaseSpace::BaseSpace(RawHeap* heap, size_t index, PageType type)
: heap_(heap), index_(index), type_(type) {}
void BaseSpace::AddPage(BasePage* page) {
DCHECK_EQ(pages_.cend(), std::find(pages_.cbegin(), pages_.cend(), page));
pages_.push_back(page);
}
void BaseSpace::RemovePage(BasePage* page) {
auto it = std::find(pages_.cbegin(), pages_.cend(), page);
DCHECK_NE(pages_.cend(), it);
pages_.erase(it);
}
NormalPageSpace::NormalPageSpace(RawHeap* heap, size_t index)
: BaseSpace(heap, index, PageType::kNormal) {}
LargePageSpace::LargePageSpace(RawHeap* heap, size_t index)
: BaseSpace(heap, index, PageType::kLarge) {}
} // namespace internal
} // namespace cppgc
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_HEAP_SPACE_H_
#define V8_HEAP_CPPGC_HEAP_SPACE_H_
#include <vector>
#include "src/base/logging.h"
#include "src/base/macros.h"
#include "src/heap/cppgc/free-list.h"
namespace cppgc {
namespace internal {
class RawHeap;
class BasePage;
// BaseSpace is responsible for page management.
class V8_EXPORT_PRIVATE BaseSpace {
public:
using Pages = std::vector<BasePage*>;
using iterator = Pages::iterator;
using const_iterator = Pages::const_iterator;
BaseSpace(const BaseSpace&) = delete;
BaseSpace& operator=(const BaseSpace&) = delete;
iterator begin() { return pages_.begin(); }
const_iterator begin() const { return pages_.begin(); }
iterator end() { return pages_.end(); }
const_iterator end() const { return pages_.end(); }
size_t size() const { return pages_.size(); }
bool is_large() const { return type_ == PageType::kLarge; }
size_t index() const { return index_; }
RawHeap* raw_heap() { return heap_; }
const RawHeap* raw_heap() const { return heap_; }
// Page manipulation functions.
void AddPage(BasePage*);
void RemovePage(BasePage*);
protected:
enum class PageType { kNormal, kLarge };
explicit BaseSpace(RawHeap* heap, size_t index, PageType type);
private:
RawHeap* heap_;
Pages pages_;
const size_t index_;
const PageType type_;
};
class V8_EXPORT_PRIVATE NormalPageSpace final : public BaseSpace {
public:
class LinearAllocationBuffer {
public:
void* Allocate(size_t alloc_size) {
DCHECK_GE(size_, alloc_size);
void* result = start_;
start_ += alloc_size;
size_ -= alloc_size;
return result;
}
void Set(void* ptr, size_t size) {
start_ = static_cast<uint8_t*>(ptr);
size_ = size;
}
void* start() const { return start_; }
size_t size() const { return size_; }
private:
uint8_t* start_ = nullptr;
size_t size_ = 0;
};
NormalPageSpace(RawHeap* heap, size_t index);
LinearAllocationBuffer& linear_allocation_buffer() { return current_lab_; }
const LinearAllocationBuffer& linear_allocation_buffer() const {
return current_lab_;
}
FreeList& free_list() { return free_list_; }
const FreeList& free_list() const { return free_list_; }
private:
LinearAllocationBuffer current_lab_;
FreeList free_list_;
};
class V8_EXPORT_PRIVATE LargePageSpace final : public BaseSpace {
public:
LargePageSpace(RawHeap* heap, size_t index);
};
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_HEAP_SPACE_H_
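LinearAllocationBuffer above is a plain bump pointer over one contiguous block: ObjectAllocator installs a block with Set() and serves allocations with Allocate() until the buffer runs dry. A small sketch with made-up sizes:

// Given: void* block_start pointing at a 512-byte free block (assumed).
NormalPageSpace::LinearAllocationBuffer lab;
lab.Set(block_start, 512);   // adopt the free block as the current LAB
void* a = lab.Allocate(48);  // == block_start;      size() drops to 464
void* b = lab.Allocate(64);  // == block_start + 48; size() drops to 400
// Once size() < the requested size, ObjectAllocator::OutOfLineAllocate()
// refills the LAB from the freelist or a fresh page (see object-allocator.cc).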
@@ -53,8 +53,10 @@ class StackMarker final : public StackVisitor {
};
 Heap::Heap()
-    : stack_(std::make_unique<Stack>(v8::base::Stack::GetStackStart())),
-      allocator_(std::make_unique<BasicAllocator>(this)),
+    : raw_heap_(this),
+      page_backend_(std::make_unique<PageBackend>(&system_allocator_)),
+      object_allocator_(&raw_heap_),
+      stack_(std::make_unique<Stack>(v8::base::Stack::GetStackStart())),
       prefinalizer_handler_(std::make_unique<PreFinalizerHandler>()) {}
Heap::~Heap() {
@@ -100,21 +102,5 @@ Heap::NoAllocationScope::NoAllocationScope(Heap* heap) : heap_(heap) {
}
Heap::NoAllocationScope::~NoAllocationScope() { heap_->no_allocation_scope_--; }
-Heap::BasicAllocator::BasicAllocator(Heap* heap) : heap_(heap) {}
-
-Heap::BasicAllocator::~BasicAllocator() {
-  for (auto* page : used_pages_) {
-    NormalPage::Destroy(page);
-  }
-}
-
-void Heap::BasicAllocator::GetNewPage() {
-  auto* page = NormalPage::Create(heap_);
-  CHECK(page);
-  used_pages_.push_back(page);
-  current_ = page->PayloadStart();
-  limit_ = page->PayloadEnd();
-}
} // namespace internal
} // namespace cppgc
@@ -12,13 +12,16 @@
#include "include/cppgc/internal/gc-info.h"
#include "include/cppgc/internal/persistent-node.h"
#include "include/cppgc/liveness-broker.h"
#include "src/base/page-allocator.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/object-allocator.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/prefinalizer-handler.h"
#include "src/heap/cppgc/raw-heap.h"
namespace cppgc {
namespace internal {
-class NormalPage;
class Stack;
class V8_EXPORT_PRIVATE LivenessBrokerFactory {
@@ -89,36 +92,25 @@ class V8_EXPORT_PRIVATE Heap final : public cppgc::Heap {
   return weak_persistent_region_;
 }
 
+  RawHeap& raw_heap() { return raw_heap_; }
+  const RawHeap& raw_heap() const { return raw_heap_; }
+
+  PageBackend* page_backend() { return page_backend_.get(); }
+  const PageBackend* page_backend() const { return page_backend_.get(); }
+
  private:
-  // TODO(chromium:1056170): Remove as soon as arenas are available for
-  // allocation.
-  //
-  // This basic allocator just gets a page from the backend and uses bump
-  // pointer allocation in the payload to allocate objects. No memory is
-  // reused across GC calls.
-  class BasicAllocator final {
-   public:
-    explicit BasicAllocator(Heap* heap);
-    ~BasicAllocator();
-    inline void* Allocate(size_t);
-
-   private:
-    void GetNewPage();
-
-    Heap* heap_;
-    Address current_ = nullptr;
-    Address limit_ = nullptr;
-    std::vector<NormalPage*> used_pages_;
-  };
-
-  bool in_no_gc_scope() { return no_gc_scope_ > 0; }
-  bool is_allocation_allowed() { return no_allocation_scope_ == 0; }
+  bool in_no_gc_scope() const { return no_gc_scope_ > 0; }
+  bool is_allocation_allowed() const { return no_allocation_scope_ == 0; }
 
+  RawHeap raw_heap_;
+  v8::base::PageAllocator system_allocator_;
+  std::unique_ptr<PageBackend> page_backend_;
+  ObjectAllocator object_allocator_;
   std::unique_ptr<Stack> stack_;
-  std::unique_ptr<BasicAllocator> allocator_;
-  std::vector<HeapObjectHeader*> objects_;
   std::unique_ptr<PreFinalizerHandler> prefinalizer_handler_;
+  std::vector<HeapObjectHeader*> objects_;
   PersistentRegion strong_persistent_region_;
   PersistentRegion weak_persistent_region_;
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_OBJECT_ALLOCATOR_INL_H_
#define V8_HEAP_CPPGC_OBJECT_ALLOCATOR_INL_H_
#include "src/heap/cppgc/object-allocator.h"
#include <new>
#include "src/base/logging.h"
#include "src/heap/cppgc/heap-object-header-inl.h"
#include "src/heap/cppgc/heap-object-header.h"
namespace cppgc {
namespace internal {
void* ObjectAllocator::AllocateObject(size_t size, GCInfoIndex gcinfo) {
const size_t allocation_size =
RoundUp(size + sizeof(HeapObjectHeader), kAllocationGranularity);
const RawHeap::SpaceType type = GetSpaceIndexForSize(allocation_size);
return AllocateObjectOnSpace(
static_cast<NormalPageSpace*>(raw_heap_->Space(type)), allocation_size,
gcinfo);
}
// static
inline RawHeap::SpaceType ObjectAllocator::GetSpaceIndexForSize(size_t size) {
if (size < 64) {
if (size < 32) return RawHeap::SpaceType::kNormal1;
return RawHeap::SpaceType::kNormal2;
}
if (size < 128) return RawHeap::SpaceType::kNormal3;
return RawHeap::SpaceType::kNormal4;
}
void* ObjectAllocator::AllocateObjectOnSpace(NormalPageSpace* space,
size_t size, GCInfoIndex gcinfo) {
DCHECK_LT(0u, gcinfo);
NormalPageSpace::LinearAllocationBuffer& current_lab =
space->linear_allocation_buffer();
if (current_lab.size() < size) {
return OutOfLineAllocate(space, size, gcinfo);
}
auto* header =
new (current_lab.Allocate(size)) HeapObjectHeader(size, gcinfo);
return header->Payload();
}
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_OBJECT_ALLOCATOR_INL_H_
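AllocateObjectOnSpace above is the allocation fast path: one size check against the LAB, a bump, and a placement-new of the header. An assumed trace of AllocateObject(20, gcinfo), again taking an 8-byte header and 16-byte granularity as illustrative values:

// allocation_size = RoundUp(20 + 8, 16) = 32  -> GetSpaceIndexForSize -> kNormal2
// current_lab.size() == 400 >= 32             -> stay on the fast path
// header = new (current_lab.Allocate(32)) HeapObjectHeader(32, gcinfo);
// return header->Payload();                   // first byte after the header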
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/object-allocator.h"
#include "src/heap/cppgc/object-allocator-inl.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header-inl.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/heap-space.h"
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/page-memory.h"
namespace cppgc {
namespace internal {
namespace {
void* AllocateLargeObject(RawHeap* raw_heap, LargePageSpace* space, size_t size,
GCInfoIndex gcinfo) {
// 1. Try to sweep large objects more than size bytes before allocating a new
// large object.
// TODO(chromium:1056170): Add lazy sweep.
// 2. If we have failed in sweeping size bytes, we complete sweeping before
// allocating this large object.
// TODO(chromium:1056170):
// raw_heap->heap()->sweeper()->Complete(space);
LargePage* page = LargePage::Create(space, size);
auto* header = new (page->ObjectHeader())
HeapObjectHeader(HeapObjectHeader::kLargeObjectSizeInHeader, gcinfo);
return header->Payload();
}
} // namespace
ObjectAllocator::ObjectAllocator(RawHeap* heap) : raw_heap_(heap) {}
void* ObjectAllocator::OutOfLineAllocate(NormalPageSpace* space, size_t size,
GCInfoIndex gcinfo) {
DCHECK_EQ(0, size & kAllocationMask);
DCHECK_LE(kFreeListEntrySize, size);
// 1. If this allocation is big enough, allocate a large object.
if (size >= kLargeObjectSizeThreshold) {
auto* large_space = static_cast<LargePageSpace*>(
raw_heap_->Space(RawHeap::SpaceType::kLarge));
return AllocateLargeObject(raw_heap_, large_space, size, gcinfo);
}
// 2. Try to allocate from the freelist.
if (void* result = AllocateFromFreeList(space, size, gcinfo)) {
return result;
}
// 3. Lazily sweep pages of this heap until we find a freed area for
// this allocation or we finish sweeping all pages of this heap.
// TODO(chromium:1056170): Add lazy sweep.
// 4. Complete sweeping.
// TODO(chromium:1056170):
// raw_heap_->heap()->sweeper()->Complete(space);
// 5. Add a new page to this heap.
NormalPage::Create(space);
// 6. Try to allocate from the freelist. This allocation must succeed.
void* result = AllocateFromFreeList(space, size, gcinfo);
CPPGC_CHECK(result);
return result;
}
void* ObjectAllocator::AllocateFromFreeList(NormalPageSpace* space, size_t size,
GCInfoIndex gcinfo) {
const FreeList::Block entry = space->free_list().Allocate(size);
if (!entry.address) return nullptr;
auto& current_lab = space->linear_allocation_buffer();
if (current_lab.size()) {
space->free_list().Add({current_lab.start(), current_lab.size()});
}
current_lab.Set(entry.address, entry.size);
return AllocateObjectOnSpace(space, size, gcinfo);
}
} // namespace internal
} // namespace cppgc
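AllocateFromFreeList above fixes the LAB refill policy: whatever remains of the current LAB goes back to the freelist first, then the freshly carved block becomes the new LAB and the request is retried on the fast path. An assumed walk-through of OutOfLineAllocate(space, 48, gcinfo):

// 1. 48 < kLargeObjectSizeThreshold        -> not routed to the large space
// 2. free_list().Allocate(48) returns, say, a 512-byte block:
//      - the old LAB remainder (if any) is Add()ed back to the freelist
//      - current_lab.Set(entry.address, 512)
// 3. AllocateObjectOnSpace(space, 48, gcinfo) now bumps from the new LAB.
// If step 2 finds nothing, NormalPage::Create(space) seeds the freelist with
// a whole page payload and the freelist allocation is retried; the final
// CPPGC_CHECK(result) asserts that this retry cannot fail.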
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_OBJECT_ALLOCATOR_H_
#define V8_HEAP_CPPGC_OBJECT_ALLOCATOR_H_
#include "include/cppgc/internal/gc-info.h"
#include "src/heap/cppgc/heap-space.h"
#include "src/heap/cppgc/raw-heap.h"
namespace cppgc {
namespace internal {
class V8_EXPORT_PRIVATE ObjectAllocator final {
public:
explicit ObjectAllocator(RawHeap* heap);
inline void* AllocateObject(size_t size, GCInfoIndex gcinfo);
private:
inline static RawHeap::SpaceType GetSpaceIndexForSize(size_t size);
inline void* AllocateObjectOnSpace(NormalPageSpace* space, size_t size,
GCInfoIndex gcinfo);
void* OutOfLineAllocate(NormalPageSpace*, size_t, GCInfoIndex);
void* AllocateFromFreeList(NormalPageSpace*, size_t, GCInfoIndex);
RawHeap* raw_heap_;
};
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_OBJECT_ALLOCATOR_H_
@@ -5,8 +5,8 @@
#include "include/cppgc/internal/pointer-policies.h"
#include "include/cppgc/internal/persistent-node.h"
#include "include/cppgc/internal/accessors.h"
#include "src/base/macros.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/heap.h"
namespace cppgc {
@@ -22,12 +22,12 @@ void EnabledCheckingPolicy::CheckPointer(const void* ptr) {
}
 PersistentRegion& StrongPersistentPolicy::GetPersistentRegion(void* object) {
-  auto* heap = Heap::From(GetHeapFromPayload(object));
+  auto* heap = BasePage::FromPayload(object)->heap();
   return heap->GetStrongPersistentRegion();
 }
 
 PersistentRegion& WeakPersistentPolicy::GetPersistentRegion(void* object) {
-  auto* heap = Heap::From(GetHeapFromPayload(object));
+  auto* heap = BasePage::FromPayload(object)->heap();
   return heap->GetWeakPersistentRegion();
 }
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/raw-heap.h"
#include "src/heap/cppgc/heap-space.h"
namespace cppgc {
namespace internal {
RawHeap::RawHeap(Heap* heap) : main_heap_(heap), used_spaces_(0) {
size_t i = 0;
for (; i < static_cast<size_t>(SpaceType::kLarge); ++i) {
spaces_[i] = std::make_unique<NormalPageSpace>(this, i);
}
spaces_[i] = std::make_unique<LargePageSpace>(
this, static_cast<size_t>(SpaceType::kLarge));
used_spaces_ = i + 1;
}
RawHeap::~RawHeap() = default;
} // namespace internal
} // namespace cppgc
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_RAW_HEAP_H_
#define V8_HEAP_CPPGC_RAW_HEAP_H_
#include <array>
#include <iterator>
#include <memory>
#include "include/cppgc/heap.h"
#include "src/base/logging.h"
#include "src/base/macros.h"
namespace cppgc {
namespace internal {
class Heap;
class BaseSpace;
// RawHeap is responsible for space management.
class V8_EXPORT_PRIVATE RawHeap final {
static constexpr size_t kNumberOfSpaces = 9;
public:
using SpaceType = cppgc::Heap::SpaceType;
using Spaces = std::array<std::unique_ptr<BaseSpace>, kNumberOfSpaces>;
using iterator = Spaces::iterator;
using const_iterator = Spaces::const_iterator;
explicit RawHeap(Heap* heap);
~RawHeap();
// Space iteration support.
iterator begin() { return spaces_.begin(); }
const_iterator begin() const { return spaces_.begin(); }
iterator end() { return std::next(spaces_.begin(), used_spaces_); }
const_iterator end() const {
return std::next(spaces_.begin(), used_spaces_);
}
size_t size() const { return used_spaces_; }
BaseSpace* Space(SpaceType type) {
const size_t index = static_cast<size_t>(type);
DCHECK_GT(spaces_.size(), index);
BaseSpace* space = spaces_[index].get();
DCHECK(space);
return space;
}
const BaseSpace* Space(SpaceType space) const {
return const_cast<RawHeap&>(*this).Space(space);
}
Heap* heap() { return main_heap_; }
const Heap* heap() const { return main_heap_; }
private:
Heap* main_heap_;
Spaces spaces_;
size_t used_spaces_;
};
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_RAW_HEAP_H_
@@ -137,6 +137,15 @@ TEST(FreeListTest, Append) {
EXPECT_TRUE(list1.IsEmpty());
}
TEST(FreeListTest, Contains) {
auto blocks = CreateEntries();
FreeList list = CreatePopulatedFreeList(blocks);
for (const auto& block : blocks) {
EXPECT_TRUE(list.Contains({block.Address(), block.Size()}));
}
}
TEST(FreeListTest, Allocate) {
static constexpr size_t kFreeListEntrySizeLog2 =
v8::base::bits::WhichPowerOfTwo(kFreeListEntrySize);
......
@@ -3,8 +3,19 @@
// found in the LICENSE file.
#include "src/heap/cppgc/heap-page.h"
#include <algorithm>
#include "include/cppgc/allocation.h"
#include "include/cppgc/internal/accessors.h"
#include "include/cppgc/persistent.h"
#include "src/base/macros.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header-inl.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/page-memory-inl.h"
#include "src/heap/cppgc/raw-heap.h"
#include "test/unittests/heap/cppgc/tests.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -13,21 +24,224 @@ namespace internal {
namespace {
-class PageTest : public testing::TestWithHeap {};
+class PageTest : public testing::TestWithHeap {
+ public:
+  RawHeap& GetRawHeap() { return Heap::From(GetHeap())->raw_heap(); }
+};
-class GCed : public GarbageCollected<GCed> {};
+template <size_t Size>
+class GCed : public GarbageCollected<GCed<Size>> {
+ public:
+  virtual void Trace(cppgc::Visitor*) const {}
+  char array[Size];
+};
} // namespace
-TEST_F(PageTest, PageLayout) {
-  auto* np = NormalPage::Create(Heap::From(GetHeap()));
-  NormalPage::Destroy(np);
-}
-
 TEST_F(PageTest, GetHeapForAllocatedObject) {
-  GCed* gced = MakeGarbageCollected<GCed>(GetHeap());
+  auto* gced = MakeGarbageCollected<GCed<1>>(GetHeap());
   EXPECT_EQ(GetHeap(), GetHeapFromPayload(gced));
 }
TEST_F(PageTest, SpaceIndexing) {
RawHeap& heap = GetRawHeap();
size_t space = 0u;
for (const auto& ptr : heap) {
EXPECT_EQ(ptr.get(),
heap.Space(static_cast<cppgc::Heap::SpaceType>(space)));
EXPECT_EQ(&heap, ptr.get()->raw_heap());
EXPECT_EQ(space, ptr->index());
++space;
}
EXPECT_EQ(space, static_cast<size_t>(cppgc::Heap::SpaceType::kUserDefined1));
}
TEST_F(PageTest, PredefinedSpaces) {
using SpaceType = RawHeap::SpaceType;
RawHeap& heap = GetRawHeap();
{
auto* gced = MakeGarbageCollected<GCed<1>>(GetHeap());
BaseSpace* space = NormalPage::FromPayload(gced)->space();
EXPECT_EQ(heap.Space(SpaceType::kNormal1), space);
EXPECT_EQ(0u, space->index());
EXPECT_FALSE(space->is_large());
}
{
auto* gced = MakeGarbageCollected<GCed<32>>(GetHeap());
BaseSpace* space = NormalPage::FromPayload(gced)->space();
EXPECT_EQ(heap.Space(SpaceType::kNormal2), space);
EXPECT_EQ(1u, space->index());
EXPECT_FALSE(space->is_large());
}
{
auto* gced = MakeGarbageCollected<GCed<64>>(GetHeap());
BaseSpace* space = NormalPage::FromPayload(gced)->space();
EXPECT_EQ(heap.Space(SpaceType::kNormal3), space);
EXPECT_EQ(2u, space->index());
EXPECT_FALSE(space->is_large());
}
{
auto* gced = MakeGarbageCollected<GCed<128>>(GetHeap());
BaseSpace* space = NormalPage::FromPayload(gced)->space();
EXPECT_EQ(heap.Space(SpaceType::kNormal4), space);
EXPECT_EQ(3u, space->index());
EXPECT_FALSE(space->is_large());
}
{
auto* gced =
MakeGarbageCollected<GCed<2 * kLargeObjectSizeThreshold>>(GetHeap());
BaseSpace* space = NormalPage::FromPayload(gced)->space();
EXPECT_EQ(heap.Space(SpaceType::kLarge), space);
EXPECT_EQ(4u, space->index());
EXPECT_TRUE(space->is_large());
}
}
TEST_F(PageTest, NormalPageIndexing) {
using SpaceType = RawHeap::SpaceType;
constexpr size_t kExpectedNumberOfPages = 10u;
constexpr size_t kObjectSize = 8u;
using Type = GCed<kObjectSize>;
static const size_t kNumberOfObjects =
(kExpectedNumberOfPages * NormalPage::PayloadSize() /
(sizeof(Type) + sizeof(HeapObjectHeader)));
std::vector<Persistent<Type>> persistents(kNumberOfObjects);
for (auto& p : persistents) {
p = MakeGarbageCollected<Type>(GetHeap());
}
const RawHeap& heap = GetRawHeap();
const BaseSpace* space = heap.Space(SpaceType::kNormal1);
EXPECT_EQ(kExpectedNumberOfPages, space->size());
size_t page_n = 0;
for (const BasePage* page : *space) {
EXPECT_FALSE(page->is_large());
EXPECT_EQ(space, page->space());
++page_n;
}
EXPECT_EQ(page_n, space->size());
}
TEST_F(PageTest, LargePageIndexing) {
using SpaceType = RawHeap::SpaceType;
constexpr size_t kExpectedNumberOfPages = 10u;
constexpr size_t kObjectSize = 2 * kLargeObjectSizeThreshold;
using Type = GCed<kObjectSize>;
const size_t kNumberOfObjects = kExpectedNumberOfPages;
std::vector<Persistent<Type>> persistents(kNumberOfObjects);
for (auto& p : persistents) {
p = MakeGarbageCollected<Type>(GetHeap());
}
const RawHeap& heap = GetRawHeap();
const BaseSpace* space = heap.Space(SpaceType::kLarge);
EXPECT_EQ(kExpectedNumberOfPages, space->size());
size_t page_n = 0;
for (const BasePage* page : *space) {
EXPECT_TRUE(page->is_large());
++page_n;
}
EXPECT_EQ(page_n, space->size());
}
TEST_F(PageTest, HeapObjectHeaderOnBasePageIndexing) {
constexpr size_t kObjectSize = 8;
using Type = GCed<kObjectSize>;
const size_t kNumberOfObjects =
NormalPage::PayloadSize() / (sizeof(Type) + sizeof(HeapObjectHeader));
const size_t kLeftSpace =
NormalPage::PayloadSize() % (sizeof(Type) + sizeof(HeapObjectHeader));
std::vector<Persistent<Type>> persistents(kNumberOfObjects);
for (auto& p : persistents) {
p = MakeGarbageCollected<Type>(GetHeap());
}
const auto* page =
static_cast<NormalPage*>(BasePage::FromPayload(persistents[0].Get()));
size_t size = 0;
size_t num = 0;
for (const HeapObjectHeader& header : *page) {
EXPECT_EQ(reinterpret_cast<Address>(persistents[num].Get()),
header.Payload());
size += header.GetSize();
++num;
}
EXPECT_EQ(num, persistents.size());
EXPECT_EQ(size + kLeftSpace, NormalPage::PayloadSize());
}
TEST_F(PageTest, HeapObjectHeaderOnLargePageIndexing) {
constexpr size_t kObjectSize = 2 * kLargeObjectSizeThreshold;
using Type = GCed<kObjectSize>;
auto* gced = MakeGarbageCollected<Type>(GetHeap());
const auto* page = static_cast<LargePage*>(BasePage::FromPayload(gced));
const size_t expected_payload_size =
RoundUp(sizeof(Type) + sizeof(HeapObjectHeader), kAllocationGranularity);
EXPECT_EQ(expected_payload_size, page->PayloadSize());
const HeapObjectHeader* header = page->ObjectHeader();
EXPECT_EQ(reinterpret_cast<Address>(gced), header->Payload());
}
TEST_F(PageTest, NormalPageCreationDestruction) {
RawHeap& heap = GetRawHeap();
const PageBackend* backend = Heap::From(GetHeap())->page_backend();
auto* space =
static_cast<NormalPageSpace*>(heap.Space(RawHeap::SpaceType::kNormal1));
auto* page = NormalPage::Create(space);
EXPECT_NE(space->end(), std::find(space->begin(), space->end(), page));
EXPECT_TRUE(
space->free_list().Contains({page->PayloadStart(), page->PayloadSize()}));
EXPECT_NE(nullptr, backend->Lookup(page->PayloadStart()));
space->free_list().Clear();
EXPECT_FALSE(
space->free_list().Contains({page->PayloadStart(), page->PayloadSize()}));
space->RemovePage(page);
EXPECT_EQ(space->end(), std::find(space->begin(), space->end(), page));
NormalPage::Destroy(page);
EXPECT_EQ(nullptr, backend->Lookup(page->PayloadStart()));
}
TEST_F(PageTest, LargePageCreationDestruction) {
constexpr size_t kObjectSize = 2 * kLargeObjectSizeThreshold;
RawHeap& heap = GetRawHeap();
const PageBackend* backend = Heap::From(GetHeap())->page_backend();
auto* space =
static_cast<LargePageSpace*>(heap.Space(RawHeap::SpaceType::kLarge));
auto* page = LargePage::Create(space, kObjectSize);
EXPECT_NE(space->end(), std::find(space->begin(), space->end(), page));
EXPECT_NE(nullptr, backend->Lookup(page->PayloadStart()));
space->RemovePage(page);
EXPECT_EQ(space->end(), std::find(space->begin(), space->end(), page));
LargePage::Destroy(page);
EXPECT_EQ(nullptr, backend->Lookup(page->PayloadStart()));
}
#if DEBUG
TEST_F(PageTest, UnsweptPageDestruction) {
RawHeap& heap = GetRawHeap();
{
auto* space =
static_cast<NormalPageSpace*>(heap.Space(RawHeap::SpaceType::kNormal1));
auto* page = NormalPage::Create(space);
EXPECT_DEATH_IF_SUPPORTED(NormalPage::Destroy(page), "");
}
{
auto* space =
static_cast<LargePageSpace*>(heap.Space(RawHeap::SpaceType::kLarge));
auto* page = LargePage::Create(space, 2 * kLargeObjectSizeThreshold);
EXPECT_DEATH_IF_SUPPORTED(LargePage::Destroy(page), "");
}
}
#endif
} // namespace internal
} // namespace cppgc