Commit 340c5458 authored by Michael Lippautz, committed by Commit Bot

cppgc: Introduce HeapBase

Introduce HeapBase as an internal base implementation for concrete heaps
(unified and stand-alone).
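
For orientation, a minimal sketch of the intended layering (illustrative
only; names and shape taken from this CL's heap.h): HeapBase owns the
core components (RawHeap, PageBackend, StatsCollector, ObjectAllocator,
Sweeper), and a concrete heap derives from it next to its public
interface and adds scheduling on top:

  class Heap final : public HeapBase,
                     public cppgc::Heap,
                     public GarbageCollector {
    // Adds GCInvoker and HeapGrowing on top of HeapBase.
  };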

Change-Id: I0aa7185e23f83e01e4e2ca23d983b28e32bb610e
Bug: chromium:1056170
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2238573
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#68338}
parent 251dea9d
......@@ -4133,6 +4133,8 @@ v8_source_set("cppgc_base") {
"src/heap/cppgc/gc-info.cc",
"src/heap/cppgc/gc-invoker.cc",
"src/heap/cppgc/gc-invoker.h",
"src/heap/cppgc/heap-base.cc",
"src/heap/cppgc/heap-base.h",
"src/heap/cppgc/heap-growing.cc",
"src/heap/cppgc/heap-growing.h",
"src/heap/cppgc/heap-object-header-inl.h",
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/heap-base.h"
#include "src/base/bounded-page-allocator.h"
#include "src/base/platform/platform.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header-inl.h"
#include "src/heap/cppgc/heap-page-inl.h"
#include "src/heap/cppgc/heap-visitor.h"
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/prefinalizer-handler.h"
#include "src/heap/cppgc/stack.h"
#include "src/heap/cppgc/stats-collector.h"
namespace cppgc {
namespace internal {
namespace {
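// Visitor that walks all pages of a RawHeap and sums the payload size of
// live (non-free) objects, excluding each object's HeapObjectHeader.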
class ObjectSizeCounter : private HeapVisitor<ObjectSizeCounter> {
friend class HeapVisitor<ObjectSizeCounter>;
public:
size_t GetSize(RawHeap* heap) {
Traverse(heap);
return accumulated_size_;
}
private:
static size_t ObjectSize(const HeapObjectHeader* header) {
const size_t size =
header->IsLargeObject()
? static_cast<const LargePage*>(BasePage::FromPayload(header))
->PayloadSize()
: header->GetSize();
DCHECK_GE(size, sizeof(HeapObjectHeader));
return size - sizeof(HeapObjectHeader);
}
bool VisitHeapObjectHeader(HeapObjectHeader* header) {
if (header->IsFree()) return true;
accumulated_size_ += ObjectSize(header);
return true;
}
size_t accumulated_size_ = 0;
};
#if defined(CPPGC_CAGED_HEAP)
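// Reserves the virtual address range for the caged heap. Retries a bounded
// number of times with randomized, alignment-rounded hints before failing
// fatally.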
VirtualMemory ReserveCagedHeap(v8::PageAllocator* platform_allocator) {
DCHECK_EQ(0u,
kCagedHeapReservationSize % platform_allocator->AllocatePageSize());
static constexpr size_t kAllocationTries = 4;
for (size_t i = 0; i < kAllocationTries; ++i) {
void* hint = reinterpret_cast<void*>(RoundDown(
reinterpret_cast<uintptr_t>(platform_allocator->GetRandomMmapAddr()),
kCagedHeapReservationAlignment));
VirtualMemory memory(platform_allocator, kCagedHeapReservationSize,
kCagedHeapReservationAlignment, hint);
if (memory.IsReserved()) return memory;
}
FATAL("Fatal process out of memory: Failed to reserve memory for caged heap");
UNREACHABLE();
}
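// Wraps the reserved range in a BoundedPageAllocator so that all page
// allocations for this heap are served from within the cage.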
std::unique_ptr<v8::base::BoundedPageAllocator> CreateBoundedAllocator(
v8::PageAllocator* platform_allocator, void* caged_heap_start) {
DCHECK(caged_heap_start);
auto start = reinterpret_cast<v8::base::BoundedPageAllocator::Address>(
caged_heap_start);
return std::make_unique<v8::base::BoundedPageAllocator>(
platform_allocator, start, kCagedHeapReservationSize, kPageSize);
}
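// Illustration only (hypothetical helper, unused by this CL): since the cage
// is reserved at an address aligned to kCagedHeapReservationAlignment, and
// assuming the reservation size equals that alignment, the cage base can be
// recovered from any interior pointer by masking off the low bits.
inline uintptr_t CagedHeapBaseFromInnerPointer(uintptr_t inner_pointer) {
  return RoundDown(inner_pointer, kCagedHeapReservationAlignment);
}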
#endif
} // namespace
HeapBase::HeapBase(std::shared_ptr<cppgc::Platform> platform,
size_t custom_spaces)
: raw_heap_(this, custom_spaces),
platform_(std::move(platform)),
#if defined(CPPGC_CAGED_HEAP)
reserved_area_(ReserveCagedHeap(platform_->GetPageAllocator())),
bounded_allocator_(CreateBoundedAllocator(platform_->GetPageAllocator(),
reserved_area_.address())),
page_backend_(std::make_unique<PageBackend>(bounded_allocator_.get())),
#else
page_backend_(
std::make_unique<PageBackend>(platform_->GetPageAllocator())),
#endif
stats_collector_(std::make_unique<StatsCollector>()),
stack_(std::make_unique<Stack>(v8::base::Stack::GetStackStart())),
prefinalizer_handler_(std::make_unique<PreFinalizerHandler>()),
object_allocator_(&raw_heap_, page_backend_.get(),
stats_collector_.get()),
sweeper_(&raw_heap_, platform_.get(), stats_collector_.get()) {
}
HeapBase::~HeapBase() = default;
size_t HeapBase::ObjectPayloadSize() const {
return ObjectSizeCounter().GetSize(const_cast<RawHeap*>(&raw_heap()));
}
HeapBase::NoGCScope::NoGCScope(HeapBase& heap) : heap_(heap) {
heap_.no_gc_scope_++;
}
HeapBase::NoGCScope::~NoGCScope() { heap_.no_gc_scope_--; }
} // namespace internal
} // namespace cppgc
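(Usage sketch, not part of the CL: NoGCScope is a counting scope, so scopes
nest and garbage collection stays disallowed until the outermost scope
unwinds. The function name below is made up for illustration.)

  void Example(HeapBase& heap) {
    HeapBase::NoGCScope outer(heap);    // no_gc_scope_ == 1
    {
      HeapBase::NoGCScope inner(heap);  // no_gc_scope_ == 2
    }
    // no_gc_scope_ == 1: allocations must still not trigger GC here.
  }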
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_HEAP_BASE_H_
#define V8_HEAP_CPPGC_HEAP_BASE_H_
#include <memory>
#include "include/cppgc/internal/persistent-node.h"
#include "include/cppgc/macros.h"
#include "src/base/macros.h"
#include "src/heap/cppgc/object-allocator.h"
#include "src/heap/cppgc/raw-heap.h"
#include "src/heap/cppgc/sweeper.h"
#include "src/heap/cppgc/virtual-memory.h"
#if defined(CPPGC_CAGED_HEAP)
namespace v8 {
namespace base {
class BoundedPageAllocator;
}
} // namespace v8
#endif
namespace cppgc {
class Platform;
namespace internal {
namespace testing {
class TestWithHeap;
}
class Marker;
class PageBackend;
class PreFinalizerHandler;
class Stack;
class StatsCollector;
// Base class for heap implementations.
class V8_EXPORT_PRIVATE HeapBase {
public:
// NoGCScope allows going over limits and prevents garbage collection from
// being triggered, whether through allocations or explicitly.
class V8_EXPORT_PRIVATE NoGCScope final {
CPPGC_STACK_ALLOCATED();
public:
explicit NoGCScope(HeapBase& heap); // NOLINT(runtime/references)
~NoGCScope();
NoGCScope(const NoGCScope&) = delete;
NoGCScope& operator=(const NoGCScope&) = delete;
private:
HeapBase& heap_;
};
HeapBase(std::shared_ptr<cppgc::Platform> platform, size_t custom_spaces);
virtual ~HeapBase();
RawHeap& raw_heap() { return raw_heap_; }
const RawHeap& raw_heap() const { return raw_heap_; }
cppgc::Platform* platform() { return platform_.get(); }
const cppgc::Platform* platform() const { return platform_.get(); }
PageBackend* page_backend() { return page_backend_.get(); }
const PageBackend* page_backend() const { return page_backend_.get(); }
StatsCollector* stats_collector() { return stats_collector_.get(); }
const StatsCollector* stats_collector() const {
return stats_collector_.get();
}
Stack* stack() { return stack_.get(); }
PreFinalizerHandler* prefinalizer_handler() {
return prefinalizer_handler_.get();
}
Marker* marker() const { return marker_.get(); }
ObjectAllocator& object_allocator() { return object_allocator_; }
Sweeper& sweeper() { return sweeper_; }
PersistentRegion& GetStrongPersistentRegion() {
return strong_persistent_region_;
}
const PersistentRegion& GetStrongPersistentRegion() const {
return strong_persistent_region_;
}
PersistentRegion& GetWeakPersistentRegion() {
return weak_persistent_region_;
}
const PersistentRegion& GetWeakPersistentRegion() const {
return weak_persistent_region_;
}
size_t ObjectPayloadSize() const;
protected:
bool in_no_gc_scope() const { return no_gc_scope_ > 0; }
RawHeap raw_heap_;
std::shared_ptr<cppgc::Platform> platform_;
#if defined(CPPGC_CAGED_HEAP)
// The order is important: page_backend_ must be destroyed before
// reserved_area_ is freed.
VirtualMemory reserved_area_;
std::unique_ptr<v8::base::BoundedPageAllocator> bounded_allocator_;
#endif
std::unique_ptr<PageBackend> page_backend_;
std::unique_ptr<StatsCollector> stats_collector_;
std::unique_ptr<Stack> stack_;
std::unique_ptr<PreFinalizerHandler> prefinalizer_handler_;
std::unique_ptr<Marker> marker_;
ObjectAllocator object_allocator_;
Sweeper sweeper_;
PersistentRegion strong_persistent_region_;
PersistentRegion weak_persistent_region_;
size_t no_gc_scope_ = 0;
friend class testing::TestWithHeap;
};
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_HEAP_BASE_H_
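(Sketch, hypothetical: the commit message anticipates both a stand-alone heap,
the cppgc::internal::Heap updated below, and a unified heap for full V8
deriving from this base. A unified variant would roughly look like:)

  class UnifiedHeap final : public HeapBase {  // hypothetical name
   public:
    UnifiedHeap(std::shared_ptr<cppgc::Platform> platform,
                size_t custom_spaces)
        : HeapBase(std::move(platform), custom_spaces) {}
    // V8-specific marking/sweeping coordination would layer on top.
  };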
......@@ -45,13 +45,13 @@ const HeapObjectHeader* ObjectHeaderFromInnerAddressImpl(const BasePage* page,
} // namespace
// static
BasePage* BasePage::FromInnerAddress(const Heap* heap, void* address) {
BasePage* BasePage::FromInnerAddress(const HeapBase* heap, void* address) {
return const_cast<BasePage*>(
FromInnerAddress(heap, const_cast<const void*>(address)));
}
// static
const BasePage* BasePage::FromInnerAddress(const Heap* heap,
const BasePage* BasePage::FromInnerAddress(const HeapBase* heap,
const void* address) {
return reinterpret_cast<const BasePage*>(
heap->page_backend()->Lookup(static_cast<ConstAddress>(address)));
......@@ -109,7 +109,7 @@ const HeapObjectHeader* BasePage::TryObjectHeaderFromInnerAddress(
return header;
}
BasePage::BasePage(Heap* heap, BaseSpace* space, PageType type)
BasePage::BasePage(HeapBase* heap, BaseSpace* space, PageType type)
: heap_(heap), space_(space), type_(type) {
DCHECK_EQ(0u, (reinterpret_cast<uintptr_t>(this) - kGuardPageSize) &
kPageOffsetMask);
......@@ -137,7 +137,7 @@ void NormalPage::Destroy(NormalPage* page) {
reinterpret_cast<Address>(page));
}
NormalPage::NormalPage(Heap* heap, BaseSpace* space)
NormalPage::NormalPage(HeapBase* heap, BaseSpace* space)
: BasePage(heap, space, PageType::kNormal),
object_start_bitmap_(PayloadStart()) {
DCHECK_LT(kLargeObjectSizeThreshold,
......@@ -181,7 +181,7 @@ size_t NormalPage::PayloadSize() {
return kPageSize - 2 * kGuardPageSize - header_size;
}
LargePage::LargePage(Heap* heap, BaseSpace* space, size_t size)
LargePage::LargePage(HeapBase* heap, BaseSpace* space, size_t size)
: BasePage(heap, space, PageType::kLarge), payload_size_(size) {}
LargePage::~LargePage() = default;
......@@ -197,7 +197,7 @@ LargePage* LargePage::Create(PageBackend* page_backend, LargePageSpace* space,
RoundUp(sizeof(LargePage), kAllocationGranularity);
const size_t allocation_size = page_header_size + size;
Heap* heap = space->raw_heap()->heap();
auto* heap = space->raw_heap()->heap();
void* memory = page_backend->AllocateLargePageMemory(allocation_size);
LargePage* page = new (memory) LargePage(heap, space, size);
return page;
......
......@@ -17,7 +17,7 @@ namespace internal {
class BaseSpace;
class NormalPageSpace;
class LargePageSpace;
class Heap;
class HeapBase;
class PageBackend;
class V8_EXPORT_PRIVATE BasePage {
......@@ -25,16 +25,16 @@ class V8_EXPORT_PRIVATE BasePage {
static inline BasePage* FromPayload(void*);
static inline const BasePage* FromPayload(const void*);
static BasePage* FromInnerAddress(const Heap*, void*);
static const BasePage* FromInnerAddress(const Heap*, const void*);
static BasePage* FromInnerAddress(const HeapBase*, void*);
static const BasePage* FromInnerAddress(const HeapBase*, const void*);
static void Destroy(BasePage*);
BasePage(const BasePage&) = delete;
BasePage& operator=(const BasePage&) = delete;
Heap* heap() { return heap_; }
const Heap* heap() const { return heap_; }
HeapBase* heap() { return heap_; }
const HeapBase* heap() const { return heap_; }
BaseSpace* space() { return space_; }
const BaseSpace* space() const { return space_; }
......@@ -56,10 +56,10 @@ class V8_EXPORT_PRIVATE BasePage {
protected:
enum class PageType { kNormal, kLarge };
BasePage(Heap*, BaseSpace*, PageType);
BasePage(HeapBase*, BaseSpace*, PageType);
private:
Heap* heap_;
HeapBase* heap_;
BaseSpace* space_;
PageType type_;
};
......@@ -153,7 +153,7 @@ class V8_EXPORT_PRIVATE NormalPage final : public BasePage {
}
private:
NormalPage(Heap* heap, BaseSpace* space);
NormalPage(HeapBase* heap, BaseSpace* space);
~NormalPage();
ObjectStartBitmap object_start_bitmap_;
......@@ -190,7 +190,7 @@ class V8_EXPORT_PRIVATE LargePage final : public BasePage {
}
private:
LargePage(Heap* heap, BaseSpace* space, size_t);
LargePage(HeapBase* heap, BaseSpace* space, size_t);
~LargePage();
size_t payload_size_;
......
......@@ -4,21 +4,10 @@
#include "src/heap/cppgc/heap.h"
#include <memory>
#include "src/base/bounded-page-allocator.h"
#include "src/base/page-allocator.h"
#include "src/base/platform/platform.h"
#include "src/heap/cppgc/gc-invoker.h"
#include "src/heap/cppgc/heap-object-header-inl.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page-inl.h"
#include "src/heap/cppgc/heap-visitor.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/prefinalizer-handler.h"
#include "src/heap/cppgc/stack.h"
#include "src/heap/cppgc/stats-collector.h"
#include "src/heap/cppgc/sweeper.h"
#include "src/heap/cppgc/virtual-memory.h"
namespace cppgc {
......@@ -51,76 +40,11 @@ void Heap::ForceGarbageCollectionSlow(const char* source, const char* reason,
}
AllocationHandle& Heap::GetAllocationHandle() {
return internal::Heap::From(this)->GetObjectAllocator();
return internal::Heap::From(this)->object_allocator();
}
namespace internal {
namespace {
class ObjectSizeCounter : private HeapVisitor<ObjectSizeCounter> {
friend class HeapVisitor<ObjectSizeCounter>;
public:
size_t GetSize(RawHeap* heap) {
Traverse(heap);
return accumulated_size_;
}
private:
static size_t ObjectSize(const HeapObjectHeader* header) {
const size_t size =
header->IsLargeObject()
? static_cast<const LargePage*>(BasePage::FromPayload(header))
->PayloadSize()
: header->GetSize();
DCHECK_GE(size, sizeof(HeapObjectHeader));
return size - sizeof(HeapObjectHeader);
}
bool VisitHeapObjectHeader(HeapObjectHeader* header) {
if (header->IsFree()) return true;
accumulated_size_ += ObjectSize(header);
return true;
}
size_t accumulated_size_ = 0;
};
#if defined(CPPGC_CAGED_HEAP)
VirtualMemory ReserveCagedHeap(v8::PageAllocator* platform_allocator) {
DCHECK_EQ(0u,
kCagedHeapReservationSize % platform_allocator->AllocatePageSize());
static constexpr size_t kAllocationTries = 4;
for (size_t i = 0; i < kAllocationTries; ++i) {
void* hint = reinterpret_cast<void*>(RoundDown(
reinterpret_cast<uintptr_t>(platform_allocator->GetRandomMmapAddr()),
kCagedHeapReservationAlignment));
VirtualMemory memory(platform_allocator, kCagedHeapReservationSize,
kCagedHeapReservationAlignment, hint);
if (memory.IsReserved()) return memory;
}
FATAL("Fatal process out of memory: Failed to reserve memory for caged heap");
UNREACHABLE();
}
std::unique_ptr<v8::base::BoundedPageAllocator> CreateBoundedAllocator(
v8::PageAllocator* platform_allocator, void* caged_heap_start) {
DCHECK(caged_heap_start);
auto start = reinterpret_cast<v8::base::BoundedPageAllocator::Address>(
caged_heap_start);
return std::make_unique<v8::base::BoundedPageAllocator>(
platform_allocator, start, kCagedHeapReservationSize, kPageSize);
}
#endif
} // namespace
// static
cppgc::LivenessBroker LivenessBrokerFactory::Create() {
return cppgc::LivenessBroker();
......@@ -128,30 +52,13 @@ cppgc::LivenessBroker LivenessBrokerFactory::Create() {
Heap::Heap(std::shared_ptr<cppgc::Platform> platform,
cppgc::Heap::HeapOptions options)
: raw_heap_(this, options.custom_spaces.size()),
platform_(std::move(platform)),
#if defined(CPPGC_CAGED_HEAP)
reserved_area_(ReserveCagedHeap(platform_->GetPageAllocator())),
bounded_allocator_(CreateBoundedAllocator(platform_->GetPageAllocator(),
reserved_area_.address())),
page_backend_(std::make_unique<PageBackend>(bounded_allocator_.get())),
#else
page_backend_(
std::make_unique<PageBackend>(platform_->GetPageAllocator())),
#endif
stats_collector_(std::make_unique<StatsCollector>()),
object_allocator_(&raw_heap_, page_backend_.get(),
stats_collector_.get()),
sweeper_(&raw_heap_, platform_.get(), stats_collector_.get()),
: HeapBase(platform, options.custom_spaces.size()),
gc_invoker_(this, platform_.get(), options.stack_support),
growing_(&gc_invoker_, stats_collector_.get(),
options.resource_constraints),
stack_(std::make_unique<Stack>(v8::base::Stack::GetStackStart())),
prefinalizer_handler_(std::make_unique<PreFinalizerHandler>()) {
}
options.resource_constraints) {}
Heap::~Heap() {
NoGCScope no_gc(this);
NoGCScope no_gc(*this);
// Finish already running GC if any, but don't finalize live objects.
sweeper_.Finish();
}
......@@ -176,18 +83,10 @@ void Heap::CollectGarbage(Config config) {
}
marker_.reset();
{
NoGCScope no_gc(this);
NoGCScope no_gc(*this);
sweeper_.Start(config.sweeping_type);
}
}
size_t Heap::ObjectPayloadSize() const {
return ObjectSizeCounter().GetSize(const_cast<RawHeap*>(&raw_heap()));
}
Heap::NoGCScope::NoGCScope(Heap* heap) : heap_(heap) { heap_->no_gc_scope_++; }
Heap::NoGCScope::~NoGCScope() { heap_->no_gc_scope_--; }
} // namespace internal
} // namespace cppgc
......@@ -5,69 +5,26 @@
#ifndef V8_HEAP_CPPGC_HEAP_H_
#define V8_HEAP_CPPGC_HEAP_H_
#include <memory>
#include <vector>
#include "include/cppgc/heap.h"
#include "include/cppgc/internal/gc-info.h"
#include "include/cppgc/internal/persistent-node.h"
#include "include/cppgc/liveness-broker.h"
#include "include/cppgc/macros.h"
#include "src/base/page-allocator.h"
#include "src/heap/cppgc/garbage-collector.h"
#include "src/heap/cppgc/gc-invoker.h"
#include "src/heap/cppgc/heap-base.h"
#include "src/heap/cppgc/heap-growing.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/object-allocator.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/prefinalizer-handler.h"
#include "src/heap/cppgc/raw-heap.h"
#include "src/heap/cppgc/sweeper.h"
#include "src/heap/cppgc/virtual-memory.h"
#if defined(CPPGC_CAGED_HEAP)
namespace v8 {
namespace base {
class BoundedPageAllocator;
}
} // namespace v8
#endif
namespace cppgc {
namespace internal {
namespace testing {
class TestWithHeap;
}
class StatsCollector;
class Stack;
class V8_EXPORT_PRIVATE LivenessBrokerFactory {
public:
static LivenessBroker Create();
};
class V8_EXPORT_PRIVATE Heap final : public cppgc::Heap,
class V8_EXPORT_PRIVATE Heap final : public HeapBase,
public cppgc::Heap,
public GarbageCollector {
public:
// NoGCScope allows going over limits and avoids triggering garbage
// collection triggered through allocations or even explicitly.
class V8_EXPORT_PRIVATE NoGCScope final {
CPPGC_STACK_ALLOCATED();
public:
explicit NoGCScope(Heap* heap);
~NoGCScope();
NoGCScope(const NoGCScope&) = delete;
NoGCScope& operator=(const NoGCScope&) = delete;
private:
Heap* const heap_;
};
static Heap* From(cppgc::Heap* heap) { return static_cast<Heap*>(heap); }
static const Heap* From(const cppgc::Heap* heap) {
return static_cast<const Heap*>(heap);
......@@ -79,80 +36,13 @@ class V8_EXPORT_PRIVATE Heap final : public cppgc::Heap,
void CollectGarbage(Config config) final;
PreFinalizerHandler* prefinalizer_handler() {
return prefinalizer_handler_.get();
}
PersistentRegion& GetStrongPersistentRegion() {
return strong_persistent_region_;
}
const PersistentRegion& GetStrongPersistentRegion() const {
return strong_persistent_region_;
}
PersistentRegion& GetWeakPersistentRegion() {
return weak_persistent_region_;
}
const PersistentRegion& GetWeakPersistentRegion() const {
return weak_persistent_region_;
}
RawHeap& raw_heap() { return raw_heap_; }
const RawHeap& raw_heap() const { return raw_heap_; }
StatsCollector* stats_collector() { return stats_collector_.get(); }
const StatsCollector* stats_collector() const {
return stats_collector_.get();
}
Stack* stack() { return stack_.get(); }
PageBackend* page_backend() { return page_backend_.get(); }
const PageBackend* page_backend() const { return page_backend_.get(); }
cppgc::Platform* platform() { return platform_.get(); }
const cppgc::Platform* platform() const { return platform_.get(); }
ObjectAllocator& object_allocator() { return object_allocator_; }
Sweeper& sweeper() { return sweeper_; }
size_t epoch() const final { return epoch_; }
size_t ObjectPayloadSize() const;
ObjectAllocator& GetObjectAllocator() { return object_allocator_; }
private:
bool in_no_gc_scope() const { return no_gc_scope_ > 0; }
RawHeap raw_heap_;
std::shared_ptr<cppgc::Platform> platform_;
#if defined(CPPGC_CAGED_HEAP)
// The order is important: page_backend_ must be destroyed before
// reserved_area_ is freed.
VirtualMemory reserved_area_;
std::unique_ptr<v8::base::BoundedPageAllocator> bounded_allocator_;
#endif
std::unique_ptr<PageBackend> page_backend_;
std::unique_ptr<StatsCollector> stats_collector_;
ObjectAllocator object_allocator_;
Sweeper sweeper_;
GCInvoker gc_invoker_;
HeapGrowing growing_;
std::unique_ptr<Stack> stack_;
std::unique_ptr<PreFinalizerHandler> prefinalizer_handler_;
std::unique_ptr<Marker> marker_;
PersistentRegion strong_persistent_region_;
PersistentRegion weak_persistent_region_;
size_t epoch_ = 0;
size_t no_gc_scope_ = 0;
friend class WriteBarrier;
friend class testing::TestWithHeap;
};
} // namespace internal
......
......@@ -12,7 +12,7 @@ namespace internal {
// static
constexpr size_t RawHeap::kNumberOfRegularSpaces;
RawHeap::RawHeap(Heap* heap, size_t custom_spaces) : main_heap_(heap) {
RawHeap::RawHeap(HeapBase* heap, size_t custom_spaces) : main_heap_(heap) {
size_t i = 0;
for (; i < static_cast<size_t>(RegularSpaceType::kLarge); ++i) {
spaces_.push_back(std::make_unique<NormalPageSpace>(this, i));
......
......@@ -16,7 +16,7 @@
namespace cppgc {
namespace internal {
class Heap;
class HeapBase;
class BaseSpace;
// RawHeap is responsible for space management.
......@@ -47,7 +47,7 @@ class V8_EXPORT_PRIVATE RawHeap final {
using iterator = Spaces::iterator;
using const_iterator = Spaces::const_iterator;
explicit RawHeap(Heap* heap, size_t custom_spaces);
explicit RawHeap(HeapBase* heap, size_t custom_spaces);
~RawHeap();
// Space iteration support.
......@@ -77,8 +77,8 @@ class V8_EXPORT_PRIVATE RawHeap final {
return const_cast<RawHeap&>(*this).CustomSpace(space_index);
}
Heap* heap() { return main_heap_; }
const Heap* heap() const { return main_heap_; }
HeapBase* heap() { return main_heap_; }
const HeapBase* heap() const { return main_heap_; }
private:
size_t SpaceIndexForCustomSpace(CustomSpaceIndex space_index) const {
......@@ -96,7 +96,7 @@ class V8_EXPORT_PRIVATE RawHeap final {
return const_cast<RawHeap&>(*this).Space(space_index);
}
Heap* main_heap_;
HeapBase* main_heap_;
Spaces spaces_;
};
......
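(Sketch, assumption: given the iterator typedefs above and the "Space
iteration support" note, RawHeap is expected to expose begin()/end(), making
range-based traversal of all spaces possible; each element is assumed to be a
std::unique_ptr<BaseSpace>.)

  void VisitAllSpaces(RawHeap& raw_heap) {
    for (auto& space : raw_heap) {
      // Inspect, allocate into, or sweep each BaseSpace here.
    }
  }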
......@@ -45,13 +45,13 @@ void WriteBarrier::MarkingBarrierSlow(const void*, const void* value) {
if (!value || value == kSentinelPointer) return;
const BasePage* page = BasePage::FromPayload(value);
const Heap* heap = page->heap();
const auto* heap = page->heap();
// Marker being not set up means that no incremental/concurrent marking is in
// progress.
if (!heap->marker_) return;
if (!heap->marker()) return;
MarkValue(page, heap->marker_.get(), value);
MarkValue(page, heap->marker(), value);
}
} // namespace internal
......
......@@ -92,7 +92,7 @@ TEST_F(GCHeapTest, ObjectPayloadSize) {
Heap::From(GetHeap())->CollectGarbage(
GarbageCollector::Config::ConservativeAtomicConfig());
Heap::NoGCScope no_gc_scope(Heap::From(GetHeap()));
Heap::NoGCScope no_gc_scope(*Heap::From(GetHeap()));
for (size_t k = 0; k < kNumberOfObjectsPerArena; ++k) {
MakeGarbageCollected<GCed<kObjectSizes[0]>>(GetAllocationHandle());
......
......@@ -37,7 +37,7 @@ void TestWithHeap::ResetLinearAllocationBuffers() {
}
TestSupportingAllocationOnly::TestSupportingAllocationOnly()
: no_gc_scope_(internal::Heap::From(GetHeap())) {}
: no_gc_scope_(*internal::Heap::From(GetHeap())) {}
} // namespace testing
} // namespace internal
......