Commit 897c5618 authored by Anton Bikineev, committed by V8 LUCI CQ

cppgc: shared-cage: Introduce shared cage for all heaps

The CL does the following:
1) Globalizes CagedHeap for all HeapBases;
2) Adds a global variable representing the cage base;
3) Changes all write barriers to use this global variable for value/slot
   checks;
4) Removes functionality introduced in previous CLs that is no longer
   needed (a short sketch of the resulting cage check follows the list).
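
With a single process-wide cage, "is this address on a cppgc heap?" reduces
to bit masking against one global base instead of a per-heap lookup. A
minimal standalone sketch of the idea, assuming a 4 GiB cage on a 64-bit
target (names are illustrative, not the CL's API):

#include <cstdint>

namespace sketch {

// Assumption: the cage is one 4 GiB region reserved at a 4 GiB-aligned
// address; its base is published once at heap initialization.
constexpr uintptr_t kCageSize = uintptr_t{1} << 32;  // 4 GiB
uintptr_t g_cage_base = 0;

// In-cage test: masking off the low offset bits must yield the base.
inline bool IsInCage(const void* p) {
  return (reinterpret_cast<uintptr_t>(p) & ~(kCageSize - 1)) == g_cage_base;
}

// Cage offset: the low bits alone locate an address within the cage; this
// is what the write barriers below store into Params.
inline uintptr_t OffsetInCage(const void* p) {
  return reinterpret_cast<uintptr_t>(p) & (kCageSize - 1);
}

}  // namespace sketch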

Bug: v8:12231
Change-Id: I281a7b0bf67e349c988486fc2d43ec6d703fd292
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3688050
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81027}
parent 36e47253
......@@ -10,6 +10,7 @@
#include <cstdint>
#include "cppgc/internal/api-constants.h"
#include "cppgc/internal/caged-heap.h"
#include "cppgc/internal/logging.h"
#include "cppgc/platform.h"
#include "v8config.h" // NOLINT(build/include_directory)
......@@ -85,11 +86,17 @@ static_assert(sizeof(AgeTable) == 1 * api_constants::kMB,
// TODO(v8:12231): Remove this class entirely so that it doesn't occupy space
// when CPPGC_YOUNG_GENERATION is off.
struct CagedHeapLocalData final {
explicit CagedHeapLocalData(PageAllocator&);
V8_INLINE static CagedHeapLocalData& Get() {
return *reinterpret_cast<CagedHeapLocalData*>(CagedHeapBase::GetBase());
}
#if defined(CPPGC_YOUNG_GENERATION)
AgeTable age_table;
#endif
private:
friend class CagedHeap;
explicit CagedHeapLocalData(PageAllocator&);
};
} // namespace internal
......
......@@ -5,6 +5,7 @@
#ifndef INCLUDE_CPPGC_INTERNAL_CAGED_HEAP_H_
#define INCLUDE_CPPGC_INTERNAL_CAGED_HEAP_H_
#include <climits>
#include <cstddef>
#include "cppgc/internal/api-constants.h"
......@@ -18,23 +19,56 @@ namespace internal {
class V8_EXPORT CagedHeapBase {
public:
V8_INLINE static bool IsWithinNormalPageReservation(uintptr_t heap_base,
void* address) {
return (reinterpret_cast<uintptr_t>(address) - heap_base) <
V8_INLINE static uintptr_t OffsetFromAddress(const void* address) {
return reinterpret_cast<uintptr_t>(address) &
(api_constants::kCagedHeapReservationAlignment - 1);
}
V8_INLINE static bool IsWithinCage(const void* address) {
CPPGC_DCHECK(g_heap_base_);
return (reinterpret_cast<uintptr_t>(address) &
~(api_constants::kCagedHeapReservationAlignment - 1)) ==
g_heap_base_;
}
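// Note on the check below: the cage base is aligned to the cage size (2^32;
// see the static_assert), so XOR-ing an in-cage pointer with g_heap_base_
// clears its upper half-word. OR-ing both XOR results and shifting right by
// 32 is therefore non-zero iff either pointer lies outside the cage, which
// tests two addresses with a single branch.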
V8_INLINE static bool AreWithinCage(const void* addr1, const void* addr2) {
static constexpr size_t kHalfWordShift = sizeof(uint32_t) * CHAR_BIT;
static_assert((static_cast<size_t>(1) << kHalfWordShift) ==
api_constants::kCagedHeapReservationSize);
CPPGC_DCHECK(g_heap_base_);
return !(((reinterpret_cast<uintptr_t>(addr1) ^ g_heap_base_) |
(reinterpret_cast<uintptr_t>(addr2) ^ g_heap_base_)) >>
kHalfWordShift);
}
V8_INLINE static bool IsWithinNormalPageReservation(void* address) {
return (reinterpret_cast<uintptr_t>(address) - g_heap_base_) <
api_constants::kCagedHeapNormalPageReservationSize;
}
V8_INLINE static BasePageHandle* LookupPageFromInnerPointer(
uintptr_t heap_base, void* ptr) {
if (V8_LIKELY(IsWithinNormalPageReservation(heap_base, ptr)))
return BasePageHandle::FromPayload(ptr);
V8_INLINE static bool IsWithinLargePageReservation(const void* ptr) {
CPPGC_DCHECK(g_heap_base_);
auto uptr = reinterpret_cast<uintptr_t>(ptr);
return (uptr >= g_heap_base_ +
api_constants::kCagedHeapNormalPageReservationSize) &&
(uptr < g_heap_base_ + api_constants::kCagedHeapReservationSize);
}
V8_INLINE static uintptr_t GetBase() { return g_heap_base_; }
V8_INLINE static BasePageHandle& LookupPageFromInnerPointer(void* ptr) {
if (V8_LIKELY(IsWithinNormalPageReservation(ptr)))
return *BasePageHandle::FromPayload(ptr);
else
return LookupLargePageFromInnerPointer(heap_base, ptr);
return LookupLargePageFromInnerPointer(ptr);
}
private:
static BasePageHandle* LookupLargePageFromInnerPointer(uintptr_t heap_base,
void* address);
friend class CagedHeap;
static BasePageHandle& LookupLargePageFromInnerPointer(void* address);
static uintptr_t g_heap_base_;
};
} // namespace internal
......
......@@ -48,10 +48,6 @@ class V8_EXPORT WriteBarrier final {
Type type = Type::kNone;
#endif // !V8_ENABLE_CHECKS
#if defined(CPPGC_CAGED_HEAP)
uintptr_t start = 0;
CagedHeapLocalData& caged_heap() const {
return *reinterpret_cast<CagedHeapLocalData*>(start);
}
uintptr_t slot_offset = 0;
uintptr_t value_offset = 0;
#endif // CPPGC_CAGED_HEAP
......@@ -169,9 +165,8 @@ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
static V8_INLINE WriteBarrier::Type GetNoSlot(const void* value,
WriteBarrier::Params& params,
HeapHandleCallback) {
if (!TryGetCagedHeap(value, value, params)) {
return WriteBarrier::Type::kNone;
}
const bool within_cage = CagedHeapBase::IsWithinCage(value);
if (!within_cage) return WriteBarrier::Type::kNone;
// We know that |value| points either within the normal page or to the
// beginning of large-page, so extract the page header by bitmasking.
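// (Sketch of that bitmasking, along the lines of BasePageHandle::FromPayload
//  and assuming api_constants::kPageSize-aligned pages:
//    reinterpret_cast<uintptr_t>(value) & ~(api_constants::kPageSize - 1)
//  yields the address of the page header.)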
......@@ -188,35 +183,6 @@ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
template <WriteBarrier::ValueMode value_mode>
struct ValueModeDispatch;
static V8_INLINE bool TryGetCagedHeap(const void* slot, const void* value,
WriteBarrier::Params& params) {
// The compiler must fold these checks into a single one.
if (!value || value == kSentinelPointer) return false;
// Now we are certain that |value| points within the cage.
const uintptr_t real_cage_base =
reinterpret_cast<uintptr_t>(value) &
~(api_constants::kCagedHeapReservationAlignment - 1);
const uintptr_t cage_base_from_slot =
reinterpret_cast<uintptr_t>(slot) &
~(api_constants::kCagedHeapReservationAlignment - 1);
// If |cage_base_from_slot| is different from |real_cage_base|, the slot
// must be on stack, bail out.
if (V8_UNLIKELY(real_cage_base != cage_base_from_slot)) return false;
// Otherwise, set params.start and return.
params.start = real_cage_base;
return true;
}
// Returns whether marking is in progress. If marking is not in progress,
// sets the start of the cage accordingly.
//
// TODO(chromium:1056170): Create fast path on API.
static bool IsMarking(const HeapHandle&, WriteBarrier::Params&);
};
template <>
......@@ -229,7 +195,7 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
if (V8_LIKELY(!WriteBarrier::IsEnabled()))
return SetAndReturnType<WriteBarrier::Type::kNone>(params);
const bool within_cage = TryGetCagedHeap(slot, value, params);
const bool within_cage = CagedHeapBase::AreWithinCage(slot, value);
if (!within_cage) return WriteBarrier::Type::kNone;
// We know that |value| points either within the normal page or to the
......@@ -243,8 +209,8 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
if (!heap_handle.is_young_generation_enabled())
return WriteBarrier::Type::kNone;
params.heap = &heap_handle;
params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
params.value_offset = reinterpret_cast<uintptr_t>(value) - params.start;
params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
params.value_offset = CagedHeapBase::OffsetFromAddress(value);
return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
#else // !CPPGC_YOUNG_GENERATION
return SetAndReturnType<WriteBarrier::Type::kNone>(params);
......@@ -269,18 +235,16 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
#if defined(CPPGC_YOUNG_GENERATION)
HeapHandle& handle = callback();
if (V8_LIKELY(!IsMarking(handle, params))) {
// params.start is populated by IsMarking().
if (V8_LIKELY(!handle.is_incremental_marking_in_progress())) {
if (!handle.is_young_generation_enabled()) {
return WriteBarrier::Type::kNone;
}
params.heap = &handle;
params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
// params.value_offset stays 0.
if (params.slot_offset > api_constants::kCagedHeapReservationSize) {
// Check if slot is on stack.
// Check if slot is on stack.
if (V8_UNLIKELY(!CagedHeapBase::IsWithinCage(slot))) {
return SetAndReturnType<WriteBarrier::Type::kNone>(params);
}
params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
}
#else // !defined(CPPGC_YOUNG_GENERATION)
......@@ -428,13 +392,15 @@ void WriteBarrier::SteeleMarkingBarrier(const Params& params,
void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
CheckParams(Type::kGenerational, params);
const CagedHeapLocalData& local_data = params.caged_heap();
const CagedHeapLocalData& local_data = CagedHeapLocalData::Get();
const AgeTable& age_table = local_data.age_table;
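// Note: the age table acts as a card table over the cage. With a 1 MB table
// covering the 4 GB reservation (see the static_assert in
// caged-heap-local-data.h), one entry tracks the age of one fixed-size card,
// and cage offsets index it directly.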
// Bail out if the slot is in young generation.
if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
return;
// TODO(chromium:1029379): Consider reloading local_data in the slow path to
// reduce register pressure.
GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset,
params.heap);
}
......@@ -444,7 +410,7 @@ void WriteBarrier::GenerationalBarrierForSourceObject(
const Params& params, const void* inner_pointer) {
CheckParams(Type::kGenerational, params);
const CagedHeapLocalData& local_data = params.caged_heap();
const CagedHeapLocalData& local_data = CagedHeapLocalData::Get();
const AgeTable& age_table = local_data.age_table;
// Assume that if the first element is in young generation, the whole range is
......@@ -452,6 +418,8 @@ void WriteBarrier::GenerationalBarrierForSourceObject(
if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
return;
// TODO(chromium:1029379): Consider reloading local_data in the slow path to
// reduce register pressure.
GenerationalBarrierForSourceObjectSlow(local_data, inner_pointer,
params.heap);
}
......
......@@ -35,16 +35,11 @@ static_assert(api_constants::kCagedHeapReservationAlignment ==
static_assert(api_constants::kCagedHeapNormalPageReservationSize ==
kCagedHeapNormalPageReservationSize);
namespace {
uintptr_t CagedHeapBase::g_heap_base_ = 0u;
// TODO(v8:12231): Remove once the shared cage is there. Currently it's only
// used for large-page lookup in the write barrier.
using Cages = std::map<uintptr_t /*cage_base*/, HeapBase*>;
CagedHeap* CagedHeap::instance_ = nullptr;
static Cages& global_cages() {
static v8::base::LeakyObject<Cages> instance;
return *instance.get();
}
namespace {
VirtualMemory ReserveCagedHeap(PageAllocator& platform_allocator) {
DCHECK_EQ(0u,
......@@ -67,10 +62,25 @@ VirtualMemory ReserveCagedHeap(PageAllocator& platform_allocator) {
} // namespace
CagedHeap::CagedHeap(HeapBase& heap_base, PageAllocator& platform_allocator)
// static
void CagedHeap::InitializeIfNeeded(PageAllocator& platform_allocator) {
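// Note: a function-local static is initialized exactly once, and C++11
// guarantees that initialization is thread-safe ("magic statics"), so
// concurrent heap creation cannot observe a half-constructed cage.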
static v8::base::LeakyObject<CagedHeap> caged_heap(platform_allocator);
instance_ = caged_heap.get();
}
// static
CagedHeap& CagedHeap::Instance() {
DCHECK_NOT_NULL(instance_);
return *instance_;
}
CagedHeap::CagedHeap(PageAllocator& platform_allocator)
: reserved_area_(ReserveCagedHeap(platform_allocator)) {
using CagedAddress = CagedHeap::AllocatorType::Address;
CagedHeapBase::g_heap_base_ =
reinterpret_cast<uintptr_t>(reserved_area_.address());
#if defined(CPPGC_POINTER_COMPRESSION)
// With pointer compression only single heap per thread is allowed.
CHECK(!CageBaseGlobal::IsSet());
......@@ -111,18 +121,6 @@ CagedHeap::CagedHeap(HeapBase& heap_base, PageAllocator& platform_allocator)
kCagedHeapNormalPageReservationSize, kPageSize,
v8::base::PageInitializationMode::kAllocatedPagesMustBeZeroInitialized,
v8::base::PageFreeingMode::kMakeInaccessible);
auto is_inserted = global_cages().emplace(
reinterpret_cast<uintptr_t>(reserved_area_.address()), &heap_base);
CHECK(is_inserted.second);
}
CagedHeap::~CagedHeap() {
#if defined(CPPGC_POINTER_COMPRESSION)
CHECK_EQ(reinterpret_cast<uintptr_t>(reserved_area_.address()),
CageBaseGlobalUpdater::GetCageBase());
CageBaseGlobalUpdater::UpdateCageBase(0u);
#endif // defined(CPPGC_POINTER_COMPRESSION)
}
void CagedHeap::NotifyLargePageCreated(LargePage* page) {
......@@ -139,33 +137,33 @@ void CagedHeap::NotifyLargePageDestroyed(LargePage* page) {
DCHECK_EQ(1u, size);
}
BasePage* CagedHeap::LookupPageFromInnerPointer(void* ptr) const {
BasePage& CagedHeap::LookupPageFromInnerPointer(void* ptr) const {
DCHECK(IsOnHeap(ptr));
if (V8_LIKELY(IsWithinNormalPageReservation(ptr))) {
return NormalPage::FromPayload(ptr);
if (V8_LIKELY(CagedHeapBase::IsWithinNormalPageReservation(ptr))) {
return *NormalPage::FromPayload(ptr);
} else {
return LookupLargePageFromInnerPointer(ptr);
}
}
LargePage* CagedHeap::LookupLargePageFromInnerPointer(void* ptr) const {
LargePage& CagedHeap::LookupLargePageFromInnerPointer(void* ptr) const {
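// Note on the lookup below: large_pages_ is ordered by page start address,
// so upper_bound(ptr) returns the first page starting after |ptr|; the page
// immediately preceding it is the only candidate that can contain |ptr|.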
auto it = large_pages_.upper_bound(static_cast<LargePage*>(ptr));
DCHECK_NE(large_pages_.begin(), it);
auto* page = *std::next(it, -1);
DCHECK(page);
DCHECK(page->PayloadContains(static_cast<ConstAddress>(ptr)));
return page;
return *page;
}
// static
BasePageHandle* CagedHeapBase::LookupLargePageFromInnerPointer(
uintptr_t heap_base, void* address) {
DCHECK_EQ(0, heap_base & (kCagedHeapReservationAlignment - 1));
auto it = global_cages().find(heap_base);
DCHECK_NE(global_cages().end(), it);
void CagedHeap::ResetForTesting() {
// Clear the large pages to support tests within the same process.
large_pages_.clear();
}
return it->second->caged_heap().LookupLargePageFromInnerPointer(address);
// static
BasePageHandle& CagedHeapBase::LookupLargePageFromInnerPointer(void* address) {
auto& page = CagedHeap::Instance().LookupLargePageFromInnerPointer(address);
return page;
}
} // namespace internal
......
......@@ -9,20 +9,25 @@
#include <memory>
#include <set>
#include "include/cppgc/internal/caged-heap.h"
#include "include/cppgc/platform.h"
#include "src/base/bounded-page-allocator.h"
#include "src/base/lazy-instance.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/virtual-memory.h"
namespace cppgc {
namespace internal {
namespace testing {
class TestWithHeap;
}
struct CagedHeapLocalData;
class HeapBase;
class BasePage;
class LargePage;
class CagedHeap final {
class V8_EXPORT_PRIVATE CagedHeap final {
public:
using AllocatorType = v8::base::BoundedPageAllocator;
......@@ -44,8 +49,7 @@ class CagedHeap final {
return OffsetFromAddress(address) < kCagedHeapNormalPageReservationSize;
}
CagedHeap(HeapBase& heap, PageAllocator& platform_allocator);
~CagedHeap();
static CagedHeap& Instance();
CagedHeap(const CagedHeap&) = delete;
CagedHeap& operator=(const CagedHeap&) = delete;
......@@ -67,8 +71,8 @@ class CagedHeap final {
void NotifyLargePageCreated(LargePage* page);
void NotifyLargePageDestroyed(LargePage* page);
BasePage* LookupPageFromInnerPointer(void* ptr) const;
LargePage* LookupLargePageFromInnerPointer(void* ptr) const;
BasePage& LookupPageFromInnerPointer(void* ptr) const;
LargePage& LookupLargePageFromInnerPointer(void* ptr) const;
CagedHeapLocalData& local_data() {
return *static_cast<CagedHeapLocalData*>(reserved_area_.address());
......@@ -78,6 +82,8 @@ class CagedHeap final {
}
bool IsOnHeap(const void* address) const {
DCHECK_EQ(reserved_area_.address(),
reinterpret_cast<void*>(CagedHeapBase::GetBase()));
return reinterpret_cast<void*>(BaseFromAddress(address)) ==
reserved_area_.address();
}
......@@ -85,6 +91,18 @@ class CagedHeap final {
void* base() const { return reserved_area_.address(); }
private:
friend class v8::base::LeakyObject<CagedHeap>;
friend class HeapBase;
friend class testing::TestWithHeap;
static void InitializeIfNeeded(PageAllocator&);
explicit CagedHeap(PageAllocator& platform_allocator);
void ResetForTesting();
static CagedHeap* instance_;
const VirtualMemory reserved_area_;
std::unique_ptr<AllocatorType> normal_page_bounded_allocator_;
std::unique_ptr<AllocatorType> large_page_bounded_allocator_;
......
......@@ -64,15 +64,7 @@ HeapBase::HeapBase(
lsan_page_allocator_(std::make_unique<v8::base::LsanPageAllocator>(
platform_->GetPageAllocator())),
#endif // LEAK_SANITIZER
#if defined(CPPGC_CAGED_HEAP)
caged_heap_(*this, *page_allocator()),
page_backend_(std::make_unique<PageBackend>(
caged_heap_.normal_page_allocator(),
caged_heap_.large_page_allocator(), *oom_handler_.get())),
#else // !CPPGC_CAGED_HEAP
page_backend_(std::make_unique<PageBackend>(
*page_allocator(), *page_allocator(), *oom_handler_.get())),
#endif // !CPPGC_CAGED_HEAP
page_backend_(InitializePageBackend(*page_allocator(), *oom_handler_)),
stats_collector_(std::make_unique<StatsCollector>(platform_.get())),
stack_(std::make_unique<heap::base::Stack>(
v8::base::Stack::GetStackStart())),
......@@ -109,6 +101,20 @@ size_t HeapBase::ObjectPayloadSize() const {
return ObjectSizeCounter().GetSize(const_cast<RawHeap&>(raw_heap()));
}
// static
std::unique_ptr<PageBackend> HeapBase::InitializePageBackend(
PageAllocator& allocator, FatalOutOfMemoryHandler& oom_handler) {
#if defined(CPPGC_CAGED_HEAP)
CagedHeap::InitializeIfNeeded(allocator);
auto& caged_heap = CagedHeap::Instance();
return std::make_unique<PageBackend>(caged_heap.normal_page_allocator(),
caged_heap.large_page_allocator(),
oom_handler);
#else // !CPPGC_CAGED_HEAP
return std::make_unique<PageBackend>(allocator, allocator, oom_handler);
#endif // !CPPGC_CAGED_HEAP
}
size_t HeapBase::ExecutePreFinalizers() {
#ifdef CPPGC_ALLOW_ALLOCATIONS_IN_PREFINALIZERS
// Allocations in pre-finalizers should not trigger another GC.
......@@ -157,7 +163,7 @@ void HeapBase::ResetRememberedSet() {
return;
}
caged_heap().local_data().age_table.Reset(page_allocator());
CagedHeap::Instance().local_data().age_table.Reset(page_allocator());
remembered_set_.Reset();
return;
}
......
......@@ -28,10 +28,6 @@
#include "src/heap/cppgc/write-barrier.h"
#include "v8config.h" // NOLINT(build/include_directory)
#if defined(CPPGC_CAGED_HEAP)
#include "src/heap/cppgc/caged-heap.h"
#endif
#if defined(CPPGC_YOUNG_GENERATION)
#include "src/heap/cppgc/remembered-set.h"
#endif
......@@ -112,11 +108,6 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
return stats_collector_.get();
}
#if defined(CPPGC_CAGED_HEAP)
CagedHeap& caged_heap() { return caged_heap_; }
const CagedHeap& caged_heap() const { return caged_heap_; }
#endif
heap::base::Stack* stack() { return stack_.get(); }
PreFinalizerHandler* prefinalizer_handler() {
......@@ -236,6 +227,9 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
using HeapHandle::is_incremental_marking_in_progress;
protected:
static std::unique_ptr<PageBackend> InitializePageBackend(
PageAllocator& allocator, FatalOutOfMemoryHandler& oom_handler);
// Used by the incremental scheduler to finalize a GC if supported.
virtual void FinalizeIncrementalGarbageCollectionIfNeeded(
cppgc::Heap::StackState) = 0;
......@@ -262,9 +256,6 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
std::unique_ptr<v8::base::LsanPageAllocator> lsan_page_allocator_;
#endif // LEAK_SANITIZER
#if defined(CPPGC_CAGED_HEAP)
CagedHeap caged_heap_;
#endif // CPPGC_CAGED_HEAP
std::unique_ptr<PageBackend> page_backend_;
// HeapRegistry requires access to page_backend_.
......
......@@ -47,8 +47,8 @@ BasePage* BasePage::FromInnerAddress(const HeapBase* heap, void* address) {
const BasePage* BasePage::FromInnerAddress(const HeapBase* heap,
const void* address) {
#if defined(CPPGC_CAGED_HEAP)
return heap->caged_heap().LookupPageFromInnerPointer(
const_cast<void*>(address));
return static_cast<BasePage*>(
&CagedHeapBase::LookupPageFromInnerPointer(const_cast<void*>(address)));
#else // !defined(CPPGC_CAGED_HEAP)
return reinterpret_cast<const BasePage*>(
heap->page_backend()->Lookup(static_cast<ConstAddress>(address)));
......@@ -243,7 +243,7 @@ LargePage* LargePage::Create(PageBackend& page_backend, LargePageSpace& space,
LargePage* page = new (memory) LargePage(*heap, space, size);
page->SynchronizedStore();
#if defined(CPPGC_CAGED_HEAP)
heap->caged_heap().NotifyLargePageCreated(page);
CagedHeap::Instance().NotifyLargePageCreated(page);
#endif // defined(CPPGC_CAGED_HEAP)
page->heap().stats_collector()->NotifyAllocatedMemory(allocation_size);
return page;
......@@ -267,7 +267,7 @@ void LargePage::Destroy(LargePage* page) {
page->~LargePage();
PageBackend* backend = heap.page_backend();
#if defined(CPPGC_CAGED_HEAP)
heap.caged_heap().NotifyLargePageDestroyed(page);
CagedHeap::Instance().NotifyLargePageDestroyed(page);
#endif // defined(CPPGC_CAGED_HEAP)
heap.stats_collector()->NotifyFreedMemory(AllocationSize(payload_size));
backend->FreeLargePageMemory(reinterpret_cast<Address>(page));
......
......@@ -4,7 +4,6 @@
#include "src/heap/cppgc/marking-verifier.h"
#include "include/cppgc/internal/caged-heap-local-data.h"
#include "src/base/logging.h"
#include "src/heap/cppgc/gc-info-table.h"
#include "src/heap/cppgc/heap-object-header.h"
......@@ -12,6 +11,10 @@
#include "src/heap/cppgc/marking-visitor.h"
#include "src/heap/cppgc/object-view.h"
#if defined(CPPGC_CAGED_HEAP)
#include "include/cppgc/internal/caged-heap-local-data.h"
#endif // defined(CPPGC_CAGED_HEAP)
namespace cppgc {
namespace internal {
......@@ -106,8 +109,9 @@ bool MarkingVerifierBase::VisitHeapObjectHeader(HeapObjectHeader& header) {
#if defined(CPPGC_YOUNG_GENERATION)
if (collection_type_ == Heap::Config::CollectionType::kMinor) {
const auto age = heap_.caged_heap().local_data().age_table.GetAge(
heap_.caged_heap().OffsetFromAddress(header.ObjectStart()));
auto& caged_heap = CagedHeap::Instance();
const auto age = caged_heap.local_data().age_table.GetAge(
caged_heap.OffsetFromAddress(header.ObjectStart()));
if (age == AgeTable::Age::kOld) {
// Do not verify old objects.
return true;
......
......@@ -37,7 +37,7 @@ void MarkRangeAsYoung(BasePage* page, Address begin, Address end) {
const bool new_page =
(begin == page->PayloadStart()) && (end == page->PayloadEnd());
auto& age_table = page->heap().caged_heap().local_data().age_table;
auto& age_table = CagedHeap::Instance().local_data().age_table;
age_table.SetAgeForRange(CagedHeap::OffsetFromAddress(begin),
CagedHeap::OffsetFromAddress(end),
AgeTable::Age::kYoung,
......
......@@ -4,7 +4,6 @@
#include "include/cppgc/internal/pointer-policies.h"
#include "include/cppgc/internal/caged-heap-local-data.h"
#include "include/cppgc/internal/persistent-node.h"
#include "src/base/logging.h"
#include "src/base/macros.h"
......
......@@ -428,8 +428,7 @@ class SweepFinalizer final {
SetMemoryInaccessible(header, size);
};
#if defined(CPPGC_CAGED_HEAP)
const uint64_t cage_base =
reinterpret_cast<uint64_t>(page->heap().caged_heap().base());
const uint64_t cage_base = CagedHeapBase::GetBase();
HeapObjectHeader* next_unfinalized = nullptr;
for (auto* unfinalized_header = page_state->unfinalized_objects_head;
......
......@@ -5,7 +5,6 @@
#include "src/heap/cppgc/visitor.h"
#include "src/base/sanitizer/msan.h"
#include "src/heap/cppgc/caged-heap.h"
#include "src/heap/cppgc/gc-info-table.h"
#include "src/heap/cppgc/heap-base.h"
#include "src/heap/cppgc/heap-object-header.h"
......@@ -13,6 +12,10 @@
#include "src/heap/cppgc/object-view.h"
#include "src/heap/cppgc/page-memory.h"
#if defined(CPPGC_CAGED_HEAP)
#include "src/heap/cppgc/caged-heap.h"
#endif // defined(CPPGC_CAGED_HEAP)
namespace cppgc {
#ifdef V8_ENABLE_CHECKS
......@@ -68,7 +71,7 @@ void ConservativeTracingVisitor::TryTracePointerConservatively(
Address pointer) {
#if defined(CPPGC_CAGED_HEAP)
// TODO(chromium:1056170): Add support for SIMD in stack scanning.
if (V8_LIKELY(!heap_.caged_heap().IsOnHeap(pointer))) return;
if (V8_LIKELY(!CagedHeapBase::IsWithinCage(pointer))) return;
#endif // defined(CPPGC_CAGED_HEAP)
const BasePage* page = reinterpret_cast<const BasePage*>(
......
......@@ -185,23 +185,6 @@ bool WriteBarrierTypeForNonCagedHeapPolicy::IsMarking(HeapHandle& heap_handle) {
return marker && marker->IsMarking();
}
#if defined(CPPGC_CAGED_HEAP)
// static
bool WriteBarrierTypeForCagedHeapPolicy::IsMarking(
const HeapHandle& heap_handle, WriteBarrier::Params& params) {
const auto& heap_base = internal::HeapBase::From(heap_handle);
const bool is_marking = heap_base.marker() && heap_base.marker()->IsMarking();
// Also set caged heap start here to avoid another call immediately after
// checking IsMarking().
#if defined(CPPGC_YOUNG_GENERATION)
params.start = reinterpret_cast<uintptr_t>(heap_base.caged_heap().base());
#endif // !CPPGC_YOUNG_GENERATION
return is_marking;
}
#endif // CPPGC_CAGED_HEAP
#if defined(CPPGC_YOUNG_GENERATION)
// static
......
......@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include "include/cppgc/internal/caged-heap-local-data.h"
#include "include/cppgc/internal/caged-heap.h"
#include "test/unittests/heap/cppgc/tests.h"
#include "testing/gtest/include/gtest/gtest.h"
......@@ -18,8 +19,7 @@ class AgeTableTest : public testing::TestWithHeap {
AgeTableTest()
: disallow_gc_(GetHeapHandle()),
age_table_(Heap::From(GetHeap())->caged_heap().local_data().age_table) {
}
age_table_(CagedHeap::Instance().local_data().age_table) {}
~AgeTableTest() override {
age_table_.Reset(GetPlatform().GetPageAllocator());
......@@ -197,9 +197,8 @@ TEST_F(AgeTableTest, SetAgeForMultipleCardsConsiderAdjacentCards) {
}
TEST_F(AgeTableTest, MarkAllCardsAsYoung) {
void* heap_start = Heap::From(GetHeap())->caged_heap().base();
void* heap_end =
static_cast<uint8_t*>(heap_start) + kCagedHeapReservationSize - 1;
uint8_t* heap_start = reinterpret_cast<uint8_t*>(CagedHeapBase::GetBase());
void* heap_end = heap_start + kCagedHeapReservationSize - 1;
AssertAgeForAddressRange(heap_start, heap_end, Age::kOld);
SetAgeForAddressRange(heap_start, heap_end, Age::kYoung,
AdjacentCardsPolicy::kIgnore);
......
......@@ -248,7 +248,6 @@ TYPED_TEST(MinorGCTestForType, OldObjectIsNotVisited) {
template <typename Type1, typename Type2>
void InterGenerationalPointerTest(MinorGCTest* test, cppgc::Heap* heap) {
auto* internal_heap = Heap::From(heap);
Persistent<Type1> old =
MakeGarbageCollected<Type1>(heap->GetAllocationHandle());
test->CollectMinor();
......@@ -265,12 +264,10 @@ void InterGenerationalPointerTest(MinorGCTest* test, cppgc::Heap* heap) {
ptr->next = young;
young = ptr;
EXPECT_TRUE(HeapObjectHeader::FromObject(young).IsYoung());
const uintptr_t offset =
internal_heap->caged_heap().OffsetFromAddress(young);
const uintptr_t offset = CagedHeap::OffsetFromAddress(young);
// Age may be young or unknown.
EXPECT_NE(
AgeTable::Age::kOld,
Heap::From(heap)->caged_heap().local_data().age_table.GetAge(offset));
EXPECT_NE(AgeTable::Age::kOld,
CagedHeap::Instance().local_data().age_table.GetAge(offset));
}
}
......
......@@ -49,6 +49,12 @@ TestWithHeap::TestWithHeap()
: heap_(Heap::Create(platform_)),
allocation_handle_(heap_->GetAllocationHandle()) {}
TestWithHeap::~TestWithHeap() {
#if defined(CPPGC_CAGED_HEAP)
CagedHeap::Instance().ResetForTesting();
#endif // defined(CPPGC_CAGED_HEAP)
}
void TestWithHeap::ResetLinearAllocationBuffers() {
Heap::From(GetHeap())->object_allocator().ResetLinearAllocationBuffers();
}
......
......@@ -69,6 +69,7 @@ class TestWithPlatform : public ::testing::Test {
class TestWithHeap : public TestWithPlatform {
public:
TestWithHeap();
~TestWithHeap() override;
void PreciseGC() {
heap_->ForceGarbageCollectionSlow(
......