Commit 86af7c6a authored by Omer Katz, committed by V8 LUCI CQ

cppgc: Rename HoH methods

Replaces Payload* terminology with Object* terminology.
HoH::ObjectSize = just the object, without the header.
HoH::AllocatedSize = both the object and the header.

Payload terminology is retained only for pages.
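
For readers skimming the diff, the following is a minimal standalone sketch
(not the real cppgc header, which bit-encodes the size alongside GC flags in
16 bits) of how the renamed accessors relate: FromObject() recovers the
header from an object pointer, and AllocatedSize() exceeds ObjectSize() by
exactly sizeof(HeapObjectHeader). All names mirror the patch; the layout and
encoding details are simplified assumptions.

// sketch.cc -- simplified illustration, assuming [header | object] layout.
#include <cassert>
#include <cstddef>
#include <new>

namespace sketch {

class HeapObjectHeader {
 public:
  // The object starts directly after the header, so the header for a given
  // object pointer sits sizeof(HeapObjectHeader) bytes before it.
  static HeapObjectHeader& FromObject(void* object) {
    return *reinterpret_cast<HeapObjectHeader*>(
        static_cast<char*>(object) - sizeof(HeapObjectHeader));
  }

  explicit HeapObjectHeader(size_t allocated_size)
      : allocated_size_(allocated_size) {}

  // Both the object and the header.
  size_t AllocatedSize() const { return allocated_size_; }
  // Just the object, without the header.
  size_t ObjectSize() const {
    return allocated_size_ - sizeof(HeapObjectHeader);
  }

  char* ObjectStart() {
    return reinterpret_cast<char*>(this) + sizeof(HeapObjectHeader);
  }
  char* ObjectEnd() {
    return reinterpret_cast<char*>(this) + allocated_size_;
  }

 private:
  size_t allocated_size_;  // The real header packs this into 16 bits.
};

}  // namespace sketch

int main() {
  // Lay out [header | object] the way a cppgc normal page does.
  alignas(16) char storage[64];
  constexpr size_t kAllocatedSize = sizeof(storage);
  auto* header = new (storage) sketch::HeapObjectHeader(kAllocatedSize);

  void* object = header->ObjectStart();
  assert(&sketch::HeapObjectHeader::FromObject(object) == header);
  assert(header->ObjectSize() ==
         header->AllocatedSize() - sizeof(sketch::HeapObjectHeader));
  assert(header->ObjectEnd() == storage + kAllocatedSize);
  return 0;
}

The same invariant explains most mechanical substitutions below: GetSize()
becomes AllocatedSize(), Payload()/PayloadEnd() become
ObjectStart()/ObjectEnd(), and call sites that previously subtracted
sizeof(HeapObjectHeader) by hand now use ObjectSize() directly.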

Bug: chromium:1056170
Change-Id: I568a324ae8728f098be642b024493c375ec873cb
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2892079
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74538}
parent 3fe40a3f
......@@ -210,7 +210,7 @@ UnifiedHeapMarker::UnifiedHeapMarker(Key key, Heap* v8_heap,
void UnifiedHeapMarker::AddObject(void* object) {
mutator_marking_state_.MarkAndPush(
cppgc::internal::HeapObjectHeader::FromPayload(object));
cppgc::internal::HeapObjectHeader::FromObject(object));
}
} // namespace
......
......@@ -382,7 +382,7 @@ class CppGraphBuilderImpl final {
EmbedderNode* AddNode(const HeapObjectHeader& header) {
return static_cast<EmbedderNode*>(
graph_.AddNode(std::unique_ptr<v8::EmbedderGraph::Node>{
new EmbedderNode(header.GetName().value, header.GetSize())}));
new EmbedderNode(header.GetName().value, header.AllocatedSize())}));
}
void AddEdge(State& parent, const HeapObjectHeader& header) {
......@@ -418,7 +418,7 @@ class CppGraphBuilderImpl final {
if (HasEmbedderDataBackref(
reinterpret_cast<v8::internal::Isolate*>(cpp_heap_.isolate()),
v8_value, parent.header()->Payload())) {
v8_value, parent.header()->ObjectStart())) {
parent.get_node()->SetWrapperNode(v8_node);
auto* profiler =
......@@ -512,7 +512,7 @@ class VisiblityVisitor final : public JSVisitor {
void Visit(const void*, cppgc::TraceDescriptor desc) final {
graph_builder_.VisitForVisibility(
&parent_scope_.ParentAsRegularState(),
HeapObjectHeader::FromPayload(desc.base_object_payload));
HeapObjectHeader::FromObject(desc.base_object_payload));
}
void VisitRoot(const void*, cppgc::TraceDescriptor,
const cppgc::SourceLocation&) final {}
......@@ -556,13 +556,13 @@ class GraphBuildingVisitor final : public JSVisitor {
void Visit(const void*, cppgc::TraceDescriptor desc) final {
graph_builder_.AddEdge(
parent_scope_.ParentAsRegularState(),
HeapObjectHeader::FromPayload(desc.base_object_payload));
HeapObjectHeader::FromObject(desc.base_object_payload));
}
void VisitRoot(const void*, cppgc::TraceDescriptor desc,
const cppgc::SourceLocation& loc) final {
graph_builder_.VisitRootForGraphBuilding(
parent_scope_.ParentAsRootState(),
HeapObjectHeader::FromPayload(desc.base_object_payload), loc);
HeapObjectHeader::FromObject(desc.base_object_payload), loc);
}
void VisitWeakRoot(const void*, cppgc::TraceDescriptor, cppgc::WeakCallback,
const void*, const cppgc::SourceLocation&) final {}
......
......@@ -130,7 +130,7 @@ void MovableReferences::AddOrFilter(MovableReference* slot) {
interior_movable_references_.find(slot));
interior_movable_references_.emplace(slot, nullptr);
#if DEBUG
interior_slot_to_object_.emplace(slot, slot_header.Payload());
interior_slot_to_object_.emplace(slot, slot_header.ObjectStart());
#endif // DEBUG
}
......@@ -145,8 +145,8 @@ void MovableReferences::Relocate(Address from, Address to) {
// find the corresponding slot A.x. Object A may be moved already and the
// memory may have been freed, which would result in a crash.
if (!interior_movable_references_.empty()) {
const HeapObjectHeader& header = HeapObjectHeader::FromPayload(to);
const size_t size = header.GetSize() - sizeof(HeapObjectHeader);
const HeapObjectHeader& header = HeapObjectHeader::FromObject(to);
const size_t size = header.ObjectSize();
RelocateInteriorReferences(from, to, size);
}
......@@ -330,7 +330,7 @@ void CompactPage(NormalPage* page, CompactionState& compaction_state) {
header_address < page->PayloadEnd();) {
HeapObjectHeader* header =
reinterpret_cast<HeapObjectHeader*>(header_address);
size_t size = header->GetSize();
size_t size = header->AllocatedSize();
DCHECK_GT(size, 0u);
DCHECK_LT(size, kPageSize);
......
......@@ -125,7 +125,7 @@ void ConcurrentMarkingTask::ProcessWorklists(
BasePage::FromPayload(item.base_object_payload)
->SynchronizedLoad();
const HeapObjectHeader& header =
HeapObjectHeader::FromPayload(item.base_object_payload);
HeapObjectHeader::FromObject(item.base_object_payload);
DCHECK(!header.IsInConstruction<AccessMode::kAtomic>());
DCHECK(header.IsMarked<AccessMode::kAtomic>());
concurrent_marking_state.AccountMarkedBytes(header);
......
......@@ -31,7 +31,7 @@ void FreeUnreferencedObject(HeapHandle& heap_handle, void* object) {
return;
}
auto& header = HeapObjectHeader::FromPayload(object);
auto& header = HeapObjectHeader::FromObject(object);
header.Finalize();
// `object` is guaranteed to be of type GarbageCollected, so getting the
......@@ -43,11 +43,11 @@ void FreeUnreferencedObject(HeapHandle& heap_handle, void* object) {
LargePage::From(base_page)->PayloadSize());
LargePage::Destroy(LargePage::From(base_page));
} else { // Regular object.
const size_t header_size = header.GetSize();
const size_t header_size = header.AllocatedSize();
auto* normal_page = NormalPage::From(base_page);
auto& normal_space = *static_cast<NormalPageSpace*>(base_page->space());
auto& lab = normal_space.linear_allocation_buffer();
ConstAddress payload_end = header.PayloadEnd();
ConstAddress payload_end = header.ObjectEnd();
SetMemoryInaccessible(&header, header_size);
if (payload_end == lab.start()) { // Returning to LAB.
lab.Set(reinterpret_cast<Address>(&header), lab.size() + header_size);
......@@ -65,18 +65,18 @@ namespace {
bool Grow(HeapObjectHeader& header, BasePage& base_page, size_t new_size,
size_t size_delta) {
DCHECK_GE(new_size, header.GetSize() + kAllocationGranularity);
DCHECK_GE(new_size, header.AllocatedSize() + kAllocationGranularity);
DCHECK_GE(size_delta, kAllocationGranularity);
DCHECK(!base_page.is_large());
auto& normal_space = *static_cast<NormalPageSpace*>(base_page.space());
auto& lab = normal_space.linear_allocation_buffer();
if (lab.start() == header.PayloadEnd() && lab.size() >= size_delta) {
if (lab.start() == header.ObjectEnd() && lab.size() >= size_delta) {
// LABs are considered used memory which means that no allocated size
// adjustments are needed.
Address delta_start = lab.Allocate(size_delta);
SetMemoryAccessible(delta_start, size_delta);
header.SetSize(new_size);
header.SetAllocatedSize(new_size);
return true;
}
return false;
......@@ -84,20 +84,20 @@ bool Grow(HeapObjectHeader& header, BasePage& base_page, size_t new_size,
bool Shrink(HeapObjectHeader& header, BasePage& base_page, size_t new_size,
size_t size_delta) {
DCHECK_GE(header.GetSize(), new_size + kAllocationGranularity);
DCHECK_GE(header.AllocatedSize(), new_size + kAllocationGranularity);
DCHECK_GE(size_delta, kAllocationGranularity);
DCHECK(!base_page.is_large());
auto& normal_space = *static_cast<NormalPageSpace*>(base_page.space());
auto& lab = normal_space.linear_allocation_buffer();
Address free_start = header.PayloadEnd() - size_delta;
if (lab.start() == header.PayloadEnd()) {
Address free_start = header.ObjectEnd() - size_delta;
if (lab.start() == header.ObjectEnd()) {
DCHECK_EQ(free_start, lab.start() - size_delta);
// LABs are considered used memory which means that no allocated size
// adjustments are needed.
lab.Set(free_start, lab.size() + size_delta);
SetMemoryInaccessible(lab.start(), size_delta);
header.SetSize(new_size);
header.SetAllocatedSize(new_size);
return true;
}
// Heuristic: Only return memory to the free list if the block is larger than
......@@ -107,7 +107,7 @@ bool Shrink(HeapObjectHeader& header, BasePage& base_page, size_t new_size,
base_page.heap()->stats_collector()->NotifyExplicitFree(size_delta);
normal_space.free_list().Add({free_start, size_delta});
NormalPage::From(&base_page)->object_start_bitmap().SetBit(free_start);
header.SetSize(new_size);
header.SetAllocatedSize(new_size);
}
// Return success in any case, as we want to avoid that embedders start
// copying memory because of small deltas.
......@@ -133,8 +133,8 @@ bool Resize(void* object, size_t new_object_size) {
const size_t new_size = RoundUp<kAllocationGranularity>(
sizeof(HeapObjectHeader) + new_object_size);
auto& header = HeapObjectHeader::FromPayload(object);
const size_t old_size = header.GetSize();
auto& header = HeapObjectHeader::FromObject(object);
const size_t old_size = header.AllocatedSize();
if (new_size > old_size) {
return Grow(header, *base_page, new_size, new_size - old_size);
......
......@@ -132,7 +132,7 @@ FreeList::Block FreeList::Allocate(size_t allocation_size) {
// Final bucket candidate; check initial entry if it is able
// to service this allocation. Do not perform a linear scan,
// as it is considered too costly.
if (!entry || entry->GetSize() < allocation_size) break;
if (!entry || entry->AllocatedSize() < allocation_size) break;
}
if (entry) {
if (!entry->Next()) {
......@@ -141,7 +141,7 @@ FreeList::Block FreeList::Allocate(size_t allocation_size) {
}
entry->Unlink(&free_list_heads_[index]);
biggest_free_list_index_ = index;
return {entry, entry->GetSize()};
return {entry, entry->AllocatedSize()};
}
}
biggest_free_list_index_ = index;
......@@ -158,7 +158,7 @@ size_t FreeList::Size() const {
size_t size = 0;
for (auto* entry : free_list_heads_) {
while (entry) {
size += entry->GetSize();
size += entry->AllocatedSize();
entry = entry->Next();
}
}
......@@ -175,7 +175,7 @@ bool FreeList::ContainsForTesting(Block block) const {
for (Entry* entry = list; entry; entry = entry->Next()) {
if (entry <= block.address &&
(reinterpret_cast<Address>(block.address) + block.size <=
reinterpret_cast<Address>(entry) + entry->GetSize()))
reinterpret_cast<Address>(entry) + entry->AllocatedSize()))
return true;
}
}
......@@ -204,7 +204,7 @@ void FreeList::CollectStatistics(
size_t entry_size = 0;
for (Entry* entry = free_list_heads_[i]; entry; entry = entry->Next()) {
++entry_count;
entry_size += entry->GetSize();
entry_size += entry->AllocatedSize();
}
bucket_size.push_back(static_cast<size_t>(1) << i);
free_count.push_back(entry_count);
......
......@@ -28,17 +28,17 @@ void HeapObjectHeader::Finalize() {
IsLargeObject()
? LargePage::From(BasePage::FromPayload(this))->ObjectSize()
: ObjectSize();
ASAN_UNPOISON_MEMORY_REGION(Payload(), size);
ASAN_UNPOISON_MEMORY_REGION(ObjectStart(), size);
#endif // V8_USE_ADDRESS_SANITIZER
const GCInfo& gc_info = GlobalGCInfoTable::GCInfoFromIndex(GetGCInfoIndex());
if (gc_info.finalize) {
gc_info.finalize(Payload());
gc_info.finalize(ObjectStart());
}
}
HeapObjectName HeapObjectHeader::GetName() const {
const GCInfo& gc_info = GlobalGCInfoTable::GCInfoFromIndex(GetGCInfoIndex());
return gc_info.name(Payload());
return gc_info.name(ObjectStart());
}
} // namespace internal
......
......@@ -57,22 +57,22 @@ class HeapObjectHeader {
static constexpr size_t kMaxSize = (size_t{1} << kSizeLog2) - 1;
static constexpr uint16_t kLargeObjectSizeInHeader = 0;
inline static HeapObjectHeader& FromPayload(void* address);
inline static const HeapObjectHeader& FromPayload(const void* address);
inline static HeapObjectHeader& FromObject(void* address);
inline static const HeapObjectHeader& FromObject(const void* address);
inline HeapObjectHeader(size_t size, GCInfoIndex gc_info_index);
// The payload starts directly after the HeapObjectHeader.
inline Address Payload() const;
// The object starts directly after the HeapObjectHeader.
inline Address ObjectStart() const;
template <AccessMode mode = AccessMode::kNonAtomic>
inline Address PayloadEnd() const;
inline Address ObjectEnd() const;
template <AccessMode mode = AccessMode::kNonAtomic>
inline GCInfoIndex GetGCInfoIndex() const;
template <AccessMode mode = AccessMode::kNonAtomic>
inline size_t GetSize() const;
inline void SetSize(size_t size);
inline size_t AllocatedSize() const;
inline void SetAllocatedSize(size_t size);
template <AccessMode mode = AccessMode::kNonAtomic>
inline size_t ObjectSize() const;
......@@ -149,15 +149,15 @@ static_assert(kAllocationGranularity == sizeof(HeapObjectHeader),
"guarantee alignment");
// static
HeapObjectHeader& HeapObjectHeader::FromPayload(void* payload) {
return *reinterpret_cast<HeapObjectHeader*>(static_cast<Address>(payload) -
HeapObjectHeader& HeapObjectHeader::FromObject(void* object) {
return *reinterpret_cast<HeapObjectHeader*>(static_cast<Address>(object) -
sizeof(HeapObjectHeader));
}
// static
const HeapObjectHeader& HeapObjectHeader::FromPayload(const void* payload) {
const HeapObjectHeader& HeapObjectHeader::FromObject(const void* object) {
return *reinterpret_cast<const HeapObjectHeader*>(
static_cast<ConstAddress>(payload) - sizeof(HeapObjectHeader));
static_cast<ConstAddress>(object) - sizeof(HeapObjectHeader));
}
HeapObjectHeader::HeapObjectHeader(size_t size, GCInfoIndex gc_info_index) {
......@@ -183,16 +183,16 @@ HeapObjectHeader::HeapObjectHeader(size_t size, GCInfoIndex gc_info_index) {
#endif // DEBUG
}
Address HeapObjectHeader::Payload() const {
Address HeapObjectHeader::ObjectStart() const {
return reinterpret_cast<Address>(const_cast<HeapObjectHeader*>(this)) +
sizeof(HeapObjectHeader);
}
template <AccessMode mode>
Address HeapObjectHeader::PayloadEnd() const {
Address HeapObjectHeader::ObjectEnd() const {
DCHECK(!IsLargeObject());
return reinterpret_cast<Address>(const_cast<HeapObjectHeader*>(this)) +
GetSize<mode>();
AllocatedSize<mode>();
}
template <AccessMode mode>
......@@ -203,7 +203,7 @@ GCInfoIndex HeapObjectHeader::GetGCInfoIndex() const {
}
template <AccessMode mode>
size_t HeapObjectHeader::GetSize() const {
size_t HeapObjectHeader::AllocatedSize() const {
// Size is immutable after construction while either marking or sweeping
// is running so relaxed load (if mode == kAtomic) is enough.
uint16_t encoded_low_value =
......@@ -212,20 +212,20 @@ size_t HeapObjectHeader::GetSize() const {
return size;
}
void HeapObjectHeader::SetSize(size_t size) {
void HeapObjectHeader::SetAllocatedSize(size_t size) {
DCHECK(!IsMarked());
encoded_low_ = EncodeSize(size);
}
template <AccessMode mode>
size_t HeapObjectHeader::ObjectSize() const {
DCHECK_GT(GetSize<mode>(), sizeof(HeapObjectHeader));
return GetSize<mode>() - sizeof(HeapObjectHeader);
DCHECK_GT(AllocatedSize<mode>(), sizeof(HeapObjectHeader));
return AllocatedSize<mode>() - sizeof(HeapObjectHeader);
}
template <AccessMode mode>
bool HeapObjectHeader::IsLargeObject() const {
return GetSize<mode>() == kLargeObjectSizeInHeader;
return AllocatedSize<mode>() == kLargeObjectSizeInHeader;
}
template <AccessMode mode>
......@@ -236,7 +236,8 @@ bool HeapObjectHeader::IsInConstruction() const {
}
void HeapObjectHeader::MarkAsFullyConstructed() {
MakeGarbageCollectedTraitInternal::MarkObjectAsFullyConstructed(Payload());
MakeGarbageCollectedTraitInternal::MarkObjectAsFullyConstructed(
ObjectStart());
}
template <AccessMode mode>
......@@ -283,7 +284,7 @@ template <AccessMode mode>
void HeapObjectHeader::Trace(Visitor* visitor) const {
const GCInfo& gc_info =
GlobalGCInfoTable::GCInfoFromIndex(GetGCInfoIndex<mode>());
return gc_info.trace(visitor, Payload());
return gc_info.trace(visitor, ObjectStart());
}
template <AccessMode mode, HeapObjectHeader::EncodedHalf part,
......
......@@ -111,7 +111,7 @@ class V8_EXPORT_PRIVATE NormalPage final : public BasePage {
bool operator!=(IteratorImpl other) const { return !(*this == other); }
IteratorImpl& operator++() {
const size_t size = p_->GetSize();
const size_t size = p_->AllocatedSize();
DCHECK_EQ(0, (size & (sizeof(T) - 1)));
p_ += (size / sizeof(T));
if (reinterpret_cast<ConstAddress>(p_) == lab_start_) {
......@@ -264,7 +264,7 @@ const HeapObjectHeader* ObjectHeaderFromInnerAddressImpl(const BasePage* page,
const HeapObjectHeader* header =
bitmap.FindHeader<mode>(static_cast<ConstAddress>(address));
DCHECK_LT(address, reinterpret_cast<ConstAddress>(header) +
header->GetSize<AccessMode::kAtomic>());
header->AllocatedSize<AccessMode::kAtomic>());
return header;
}
......
......@@ -148,7 +148,7 @@ bool HeapStatisticsCollector::VisitHeapObjectHeader(HeapObjectHeader* header) {
DCHECK_NOT_NULL(current_space_stats_);
DCHECK_NOT_NULL(current_page_stats_);
if (header->IsFree()) return true;
size_t object_size = header->GetSize();
size_t object_size = header->AllocatedSize();
RecordObjectType(current_space_stats_, header, object_size);
current_page_stats_->used_size_bytes += object_size;
return true;
......
......@@ -9,7 +9,7 @@
namespace cppgc {
bool LivenessBroker::IsHeapObjectAliveImpl(const void* payload) const {
return internal::HeapObjectHeader::FromPayload(payload).IsMarked();
return internal::HeapObjectHeader::FromObject(payload).IsMarked();
}
namespace internal {
......
......@@ -191,7 +191,7 @@ MarkerBase::~MarkerBase() {
MarkingWorklists::EphemeronPairItem item;
while (mutator_marking_state_.discovered_ephemeron_pairs_worklist().Pop(
&item)) {
DCHECK(!HeapObjectHeader::FromPayload(item.key).IsMarked());
DCHECK(!HeapObjectHeader::FromObject(item.key).IsMarked());
}
#else
marking_worklists_.discovered_ephemeron_pairs_worklist()->Clear();
......@@ -472,7 +472,7 @@ bool MarkerBase::ProcessWorklistsWithDeadline(
mutator_marking_state_.marking_worklist(),
[this](const MarkingWorklists::MarkingItem& item) {
const HeapObjectHeader& header =
HeapObjectHeader::FromPayload(item.base_object_payload);
HeapObjectHeader::FromObject(item.base_object_payload);
DCHECK(!header.IsInConstruction<AccessMode::kNonAtomic>());
DCHECK(header.IsMarked<AccessMode::kNonAtomic>());
mutator_marking_state_.AccountMarkedBytes(header);
......
......@@ -174,9 +174,9 @@ MarkingStateBase::MarkingStateBase(HeapBase& heap,
void MarkingStateBase::MarkAndPush(const void* object, TraceDescriptor desc) {
DCHECK_NOT_NULL(object);
MarkAndPush(HeapObjectHeader::FromPayload(
const_cast<void*>(desc.base_object_payload)),
desc);
MarkAndPush(
HeapObjectHeader::FromObject(const_cast<void*>(desc.base_object_payload)),
desc);
}
void MarkingStateBase::MarkAndPush(HeapObjectHeader& header,
......@@ -202,7 +202,7 @@ bool MarkingStateBase::MarkNoPush(HeapObjectHeader& header) {
void MarkingStateBase::MarkAndPush(HeapObjectHeader& header) {
MarkAndPush(
header,
{header.Payload(),
{header.ObjectStart(),
GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
}
......@@ -222,7 +222,7 @@ void MarkingStateBase::RegisterWeakReferenceIfNeeded(const void* object,
// Filter out already marked values. The write barrier for WeakMember
// ensures that any newly set value after this point is kept alive and does
// not require the callback.
if (HeapObjectHeader::FromPayload(desc.base_object_payload)
if (HeapObjectHeader::FromObject(desc.base_object_payload)
.IsMarked<AccessMode::kAtomic>())
return;
RegisterWeakCallback(weak_callback, parameter);
......@@ -245,7 +245,7 @@ void MarkingStateBase::ProcessWeakContainer(const void* object,
DCHECK_NOT_NULL(object);
HeapObjectHeader& header =
HeapObjectHeader::FromPayload(const_cast<void*>(object));
HeapObjectHeader::FromObject(const_cast<void*>(object));
if (header.IsInConstruction<AccessMode::kAtomic>()) {
not_fully_constructed_worklist_.Push<AccessMode::kAtomic>(&header);
......@@ -273,7 +273,7 @@ void MarkingStateBase::ProcessEphemeron(const void* key, const void* value,
// Filter out already marked keys. The write barrier for WeakMember
// ensures that any newly set value after this point is kept alive and does
// not require the callback.
if (HeapObjectHeader::FromPayload(key).IsMarked<AccessMode::kAtomic>()) {
if (HeapObjectHeader::FromObject(key).IsMarked<AccessMode::kAtomic>()) {
if (value_desc.base_object_payload) {
MarkAndPush(value_desc.base_object_payload, value_desc);
} else {
......@@ -291,7 +291,7 @@ void MarkingStateBase::AccountMarkedBytes(const HeapObjectHeader& header) {
header.IsLargeObject<AccessMode::kAtomic>()
? reinterpret_cast<const LargePage*>(BasePage::FromPayload(&header))
->PayloadSize()
: header.GetSize<AccessMode::kAtomic>());
: header.AllocatedSize<AccessMode::kAtomic>());
}
void MarkingStateBase::AccountMarkedBytes(size_t marked_bytes) {
......@@ -348,7 +348,7 @@ void MutatorMarkingState::PushMarkedWeakContainer(HeapObjectHeader& header) {
recently_retraced_weak_containers_.Insert(&header);
PushMarked(
header,
{header.Payload(),
{header.ObjectStart(),
GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
}
......@@ -359,7 +359,7 @@ void MutatorMarkingState::DynamicallyMarkAddress(ConstAddress address) {
DCHECK(!header.IsInConstruction());
if (MarkNoPush(header)) {
marking_worklist_.Push(
{reinterpret_cast<void*>(header.Payload()),
{reinterpret_cast<void*>(header.ObjectStart()),
GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
}
}
......@@ -371,7 +371,7 @@ void MutatorMarkingState::InvokeWeakRootsCallbackIfNeeded(
// the callback instead of registering it.
#if DEBUG
const HeapObjectHeader& header =
HeapObjectHeader::FromPayload(desc.base_object_payload);
HeapObjectHeader::FromObject(desc.base_object_payload);
DCHECK_IMPLIES(header.IsInConstruction(), header.IsMarked());
#endif // DEBUG
weak_callback(LivenessBrokerFactory::Create(), parameter);
......
......@@ -40,7 +40,7 @@ void MarkingVerifierBase::Run(Heap::Config::StackState stack_state,
void VerificationState::VerifyMarked(const void* base_object_payload) const {
const HeapObjectHeader& child_header =
HeapObjectHeader::FromPayload(base_object_payload);
HeapObjectHeader::FromObject(base_object_payload);
if (!child_header.IsMarked()) {
FATAL(
......@@ -50,8 +50,8 @@ void VerificationState::VerifyMarked(const void* base_object_payload) const {
"# %s (%p)\n"
"# \\-> %s (%p)",
parent_ ? parent_->GetName().value : "Stack",
parent_ ? parent_->Payload() : nullptr, child_header.GetName().value,
child_header.Payload());
parent_ ? parent_->ObjectStart() : nullptr,
child_header.GetName().value, child_header.ObjectStart());
}
}
......@@ -66,7 +66,7 @@ void MarkingVerifierBase::VisitInConstructionConservatively(
// itself is marked. If the object is marked, then it is being processed by
// the on-heap phase.
if (verification_state_.IsParentOnStack()) {
verification_state_.VerifyMarked(header.Payload());
verification_state_.VerifyMarked(header.ObjectStart());
return;
}
......
......@@ -95,7 +95,7 @@ void* AllocateLargeObject(PageBackend* page_backend, LargePageSpace* space,
stats_collector->NotifyAllocation(size);
MarkRangeAsYoung(page, page->PayloadStart(), page->PayloadEnd());
return header->Payload();
return header->ObjectStart();
}
} // namespace
......
......@@ -123,7 +123,7 @@ void* ObjectAllocator::AllocateObjectOnSpace(NormalPageSpace* space,
->object_start_bitmap()
.SetBit<AccessMode::kAtomic>(reinterpret_cast<ConstAddress>(header));
return header->Payload();
return header->ObjectStart();
}
} // namespace internal
......
......@@ -27,7 +27,7 @@ class UnmarkedObjectsPoisoner : public HeapVisitor<UnmarkedObjectsPoisoner> {
header->IsLargeObject()
? LargePage::From(BasePage::FromPayload(header))->ObjectSize()
: header->ObjectSize();
ASAN_POISON_MEMORY_REGION(header->Payload(), size);
ASAN_POISON_MEMORY_REGION(header->ObjectStart(), size);
return true;
}
};
......
......@@ -13,7 +13,7 @@ namespace internal {
// static
size_t BaseObjectSizeTrait::GetObjectSizeForGarbageCollected(
const void* object) {
const auto& header = HeapObjectHeader::FromPayload(object);
const auto& header = HeapObjectHeader::FromObject(object);
return header.IsLargeObject()
? static_cast<const LargePage*>(BasePage::FromPayload(&header))
->ObjectSize()
......
......@@ -56,12 +56,12 @@ void EnabledCheckingPolicy::CheckPointerImpl(const void* ptr,
// Header checks.
const HeapObjectHeader* header = nullptr;
if (points_to_payload) {
header = &HeapObjectHeader::FromPayload(ptr);
header = &HeapObjectHeader::FromObject(ptr);
} else if (!heap->sweeper().IsSweepingInProgress()) {
// Mixin case.
header = &base_page->ObjectHeaderFromInnerAddress(ptr);
DCHECK_LE(header->Payload(), ptr);
DCHECK_GT(header->PayloadEnd(), ptr);
DCHECK_LE(header->ObjectStart(), ptr);
DCHECK_GT(header->ObjectEnd(), ptr);
}
if (header) {
DCHECK(!header->IsFree());
......
......@@ -200,7 +200,7 @@ typename FinalizationBuilder::ResultType SweepNormalPage(NormalPage* page) {
for (Address begin = page->PayloadStart(), end = page->PayloadEnd();
begin != end;) {
HeapObjectHeader* header = reinterpret_cast<HeapObjectHeader*>(begin);
const size_t size = header->GetSize();
const size_t size = header->AllocatedSize();
// Check if this is a free list entry.
if (header->IsFree<kAtomicAccess>()) {
SetMemoryInaccessible(header, std::min(kFreeListEntrySize, size));
......@@ -290,7 +290,7 @@ class SweepFinalizer final {
// Call finalizers.
for (HeapObjectHeader* object : page_state->unfinalized_objects) {
const size_t size = object->GetSize();
const size_t size = object->AllocatedSize();
object->Finalize();
SetMemoryInaccessible(object, size);
}
......
......@@ -18,9 +18,10 @@ TraceDescriptor TraceTraitFromInnerAddressImpl::GetTraceDescriptor(
page->SynchronizedLoad();
const HeapObjectHeader& header =
page->ObjectHeaderFromInnerAddress<AccessMode::kAtomic>(address);
return {header.Payload(), GlobalGCInfoTable::GCInfoFromIndex(
header.GetGCInfoIndex<AccessMode::kAtomic>())
.trace};
return {header.ObjectStart(),
GlobalGCInfoTable::GCInfoFromIndex(
header.GetGCInfoIndex<AccessMode::kAtomic>())
.trace};
}
} // namespace internal
......
......@@ -29,15 +29,15 @@ namespace {
void TraceConservatively(ConservativeTracingVisitor* conservative_visitor,
const HeapObjectHeader& header) {
Address* payload = reinterpret_cast<Address*>(header.Payload());
Address* object = reinterpret_cast<Address*>(header.ObjectStart());
const size_t object_size =
header.IsLargeObject()
? LargePage::From(BasePage::FromPayload(&header))->ObjectSize()
: header.ObjectSize();
for (size_t i = 0; i < (object_size / sizeof(Address)); ++i) {
Address maybe_ptr = payload[i];
Address maybe_ptr = object[i];
#if defined(MEMORY_SANITIZER)
// |payload| may be uninitialized by design or just contain padding bytes.
// |object| may be uninitialized by design or just contain padding bytes.
// Copy into a local variable that is not poisoned for conservative marking.
// Copy into a temporary variable to maintain the original MSAN state.
MSAN_MEMORY_IS_INITIALIZED(&maybe_ptr, sizeof(maybe_ptr));
......@@ -81,8 +81,8 @@ void ConservativeTracingVisitor::TraceConservativelyIfNeeded(
void ConservativeTracingVisitor::VisitFullyConstructedConservatively(
HeapObjectHeader& header) {
visitor_.Visit(
header.Payload(),
{header.Payload(),
header.ObjectStart(),
{header.ObjectStart(),
GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
}
......
......@@ -87,7 +87,7 @@ TEST_F(CppgcAllocationTest,
ConservativeGCDuringAllocationDoesNotReclaimObject) {
CallbackInCtor* obj = MakeGarbageCollected<CallbackInCtor>(
GetAllocationHandle(), [this]() { ConservativeGC(); });
EXPECT_FALSE(HeapObjectHeader::FromPayload(obj).IsFree());
EXPECT_FALSE(HeapObjectHeader::FromObject(obj).IsFree());
}
namespace {
......
......@@ -145,7 +145,7 @@ TEST_F(ConcurrentSweeperTest, BackgroundSweepOfNormalPage) {
auto* unmarked_object = MakeGarbageCollected<GCedType>(GetAllocationHandle());
auto* marked_object = MakeGarbageCollected<GCedType>(GetAllocationHandle());
HeapObjectHeader::FromPayload(marked_object).TryMarkAtomic();
HeapObjectHeader::FromObject(marked_object).TryMarkAtomic();
auto* page = BasePage::FromPayload(unmarked_object);
auto* space = page->space();
......@@ -160,10 +160,10 @@ TEST_F(ConcurrentSweeperTest, BackgroundSweepOfNormalPage) {
#if !defined(CPPGC_YOUNG_GENERATION)
// Check that the marked object was unmarked.
EXPECT_FALSE(HeapObjectHeader::FromPayload(marked_object).IsMarked());
EXPECT_FALSE(HeapObjectHeader::FromObject(marked_object).IsMarked());
#else
// Check that the marked object is still marked.
EXPECT_TRUE(HeapObjectHeader::FromPayload(marked_object).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(marked_object).IsMarked());
#endif
// Check that free list entries are created right away for non-finalizable
......@@ -184,7 +184,7 @@ TEST_F(ConcurrentSweeperTest, BackgroundSweepOfLargePage) {
auto* unmarked_object = MakeGarbageCollected<GCedType>(GetAllocationHandle());
auto* marked_object = MakeGarbageCollected<GCedType>(GetAllocationHandle());
HeapObjectHeader::FromPayload(marked_object).TryMarkAtomic();
HeapObjectHeader::FromObject(marked_object).TryMarkAtomic();
auto* unmarked_page = BasePage::FromPayload(unmarked_object);
auto* marked_page = BasePage::FromPayload(marked_object);
......@@ -199,10 +199,10 @@ TEST_F(ConcurrentSweeperTest, BackgroundSweepOfLargePage) {
#if !defined(CPPGC_YOUNG_GENERATION)
// Check that the marked object was unmarked.
EXPECT_FALSE(HeapObjectHeader::FromPayload(marked_object).IsMarked());
EXPECT_FALSE(HeapObjectHeader::FromObject(marked_object).IsMarked());
#else
// Check that the marked object is still marked.
EXPECT_TRUE(HeapObjectHeader::FromPayload(marked_object).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(marked_object).IsMarked());
#endif
// Check that free list entries are created right away for non-finalizable
......@@ -299,9 +299,8 @@ TEST_F(ConcurrentSweeperTest, IncrementalSweeping) {
MakeGarbageCollected<LargeFinalizable>(GetAllocationHandle());
auto& marked_normal_header =
HeapObjectHeader::FromPayload(marked_normal_object);
auto& marked_large_header =
HeapObjectHeader::FromPayload(marked_large_object);
HeapObjectHeader::FromObject(marked_normal_object);
auto& marked_large_header = HeapObjectHeader::FromObject(marked_large_object);
marked_normal_header.TryMarkAtomic();
marked_large_header.TryMarkAtomic();
......
......@@ -96,10 +96,10 @@ TEST_F(EphemeronPairTest, ValueMarkedWhenKeyIsMarked) {
GCed* value = MakeGarbageCollected<GCed>(GetAllocationHandle());
Persistent<EphemeronHolder> holder =
MakeGarbageCollected<EphemeronHolder>(GetAllocationHandle(), key, value);
HeapObjectHeader::FromPayload(key).TryMarkAtomic();
HeapObjectHeader::FromObject(key).TryMarkAtomic();
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get());
FinishMarking();
EXPECT_TRUE(HeapObjectHeader::FromPayload(value).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(value).IsMarked());
}
TEST_F(EphemeronPairTest, ValueNotMarkedWhenKeyIsNotMarked) {
......@@ -109,8 +109,8 @@ TEST_F(EphemeronPairTest, ValueNotMarkedWhenKeyIsNotMarked) {
MakeGarbageCollected<EphemeronHolder>(GetAllocationHandle(), key, value);
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get());
FinishMarking();
EXPECT_FALSE(HeapObjectHeader::FromPayload(key).IsMarked());
EXPECT_FALSE(HeapObjectHeader::FromPayload(value).IsMarked());
EXPECT_FALSE(HeapObjectHeader::FromObject(key).IsMarked());
EXPECT_FALSE(HeapObjectHeader::FromObject(value).IsMarked());
}
TEST_F(EphemeronPairTest, ValueNotMarkedBeforeKey) {
......@@ -120,10 +120,10 @@ TEST_F(EphemeronPairTest, ValueNotMarkedBeforeKey) {
MakeGarbageCollected<EphemeronHolder>(GetAllocationHandle(), key, value);
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get());
FinishSteps();
EXPECT_FALSE(HeapObjectHeader::FromPayload(value).IsMarked());
HeapObjectHeader::FromPayload(key).TryMarkAtomic();
EXPECT_FALSE(HeapObjectHeader::FromObject(value).IsMarked());
HeapObjectHeader::FromObject(key).TryMarkAtomic();
FinishMarking();
EXPECT_TRUE(HeapObjectHeader::FromPayload(value).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(value).IsMarked());
}
TEST_F(EphemeronPairTest, TraceEphemeronDispatch) {
......@@ -132,10 +132,10 @@ TEST_F(EphemeronPairTest, TraceEphemeronDispatch) {
Persistent<EphemeronHolderTraceEphemeron> holder =
MakeGarbageCollected<EphemeronHolderTraceEphemeron>(GetAllocationHandle(),
key, value);
HeapObjectHeader::FromPayload(key).TryMarkAtomic();
HeapObjectHeader::FromObject(key).TryMarkAtomic();
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get());
FinishMarking();
EXPECT_TRUE(HeapObjectHeader::FromPayload(value).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(value).IsMarked());
}
TEST_F(EphemeronPairTest, EmptyValue) {
......@@ -143,7 +143,7 @@ TEST_F(EphemeronPairTest, EmptyValue) {
Persistent<EphemeronHolderTraceEphemeron> holder =
MakeGarbageCollected<EphemeronHolderTraceEphemeron>(GetAllocationHandle(),
key, nullptr);
HeapObjectHeader::FromPayload(key).TryMarkAtomic();
HeapObjectHeader::FromObject(key).TryMarkAtomic();
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get());
FinishMarking();
}
......@@ -156,7 +156,7 @@ TEST_F(EphemeronPairTest, EmptyKey) {
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get());
FinishMarking();
// Key is not alive and value should thus not be held alive.
EXPECT_FALSE(HeapObjectHeader::FromPayload(value).IsMarked());
EXPECT_FALSE(HeapObjectHeader::FromObject(value).IsMarked());
}
using EphemeronPairGCTest = testing::TestWithHeap;
......@@ -220,10 +220,10 @@ TEST_F(EphemeronPairTest, EphemeronPairWithMixinKey) {
EXPECT_NE(static_cast<void*>(value), holder->ephemeron_pair().value.Get());
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get());
FinishSteps();
EXPECT_FALSE(HeapObjectHeader::FromPayload(value).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromPayload(key).TryMarkAtomic());
EXPECT_FALSE(HeapObjectHeader::FromObject(value).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(key).TryMarkAtomic());
FinishMarking();
EXPECT_TRUE(HeapObjectHeader::FromPayload(value).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(value).IsMarked());
}
TEST_F(EphemeronPairTest, EphemeronPairWithEmptyMixinValue) {
......@@ -233,7 +233,7 @@ TEST_F(EphemeronPairTest, EphemeronPairWithEmptyMixinValue) {
MakeGarbageCollected<EphemeronHolderWithMixins>(GetAllocationHandle(),
key, nullptr);
EXPECT_NE(static_cast<void*>(key), holder->ephemeron_pair().key.Get());
EXPECT_TRUE(HeapObjectHeader::FromPayload(key).TryMarkAtomic());
EXPECT_TRUE(HeapObjectHeader::FromObject(key).TryMarkAtomic());
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get());
FinishSteps();
FinishMarking();
......
......@@ -51,11 +51,11 @@ TEST_F(ExplicitManagementTest, FreeRegularObjectToLAB) {
MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
const auto* space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
const auto& lab = space->linear_allocation_buffer();
auto& header = HeapObjectHeader::FromPayload(o);
const size_t size = header.GetSize();
auto& header = HeapObjectHeader::FromObject(o);
const size_t size = header.AllocatedSize();
Address needle = reinterpret_cast<Address>(&header);
// Test checks freeing to LAB.
ASSERT_EQ(lab.start(), header.PayloadEnd());
ASSERT_EQ(lab.start(), header.ObjectEnd());
const size_t lab_size_before_free = lab.size();
const size_t allocated_size_before = AllocatedObjectSize();
subtle::FreeUnreferencedObject(GetHeapHandle(), *o);
......@@ -71,8 +71,8 @@ TEST_F(ExplicitManagementTest, FreeRegularObjectToFreeList) {
MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
const auto* space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
const auto& lab = space->linear_allocation_buffer();
auto& header = HeapObjectHeader::FromPayload(o);
const size_t size = header.GetSize();
auto& header = HeapObjectHeader::FromObject(o);
const size_t size = header.AllocatedSize();
Address needle = reinterpret_cast<Address>(&header);
// Test checks freeing to free list.
ResetLinearAllocationBuffers();
......@@ -118,7 +118,7 @@ TEST_F(ExplicitManagementTest, FreeBailsOutDuringGC) {
TEST_F(ExplicitManagementTest, GrowAtLAB) {
auto* o =
MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
auto& header = HeapObjectHeader::FromPayload(o);
auto& header = HeapObjectHeader::FromObject(o);
constexpr size_t size_of_o = sizeof(DynamicallySized);
constexpr size_t kFirstDelta = 8;
EXPECT_TRUE(subtle::Resize(*o, AdditionalBytes(kFirstDelta)));
......@@ -141,7 +141,7 @@ TEST_F(ExplicitManagementTest, GrowAtLAB) {
TEST_F(ExplicitManagementTest, GrowShrinkAtLAB) {
auto* o =
MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
auto& header = HeapObjectHeader::FromPayload(o);
auto& header = HeapObjectHeader::FromObject(o);
constexpr size_t size_of_o = sizeof(DynamicallySized);
constexpr size_t kDelta = 27;
EXPECT_TRUE(subtle::Resize(*o, AdditionalBytes(kDelta)));
......@@ -158,12 +158,12 @@ TEST_F(ExplicitManagementTest, ShrinkFreeList) {
const auto* space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
// Force returning to free list by removing the LAB.
ResetLinearAllocationBuffers();
auto& header = HeapObjectHeader::FromPayload(o);
auto& header = HeapObjectHeader::FromObject(o);
constexpr size_t size_of_o = sizeof(DynamicallySized);
EXPECT_TRUE(subtle::Resize(*o, AdditionalBytes(0)));
EXPECT_EQ(RoundUp<kAllocationGranularity>(size_of_o), header.ObjectSize());
EXPECT_TRUE(space->free_list().ContainsForTesting(
{header.PayloadEnd(), ObjectAllocator::kSmallestSpaceSize}));
{header.ObjectEnd(), ObjectAllocator::kSmallestSpaceSize}));
}
TEST_F(ExplicitManagementTest, ShrinkFreeListBailoutAvoidFragmentation) {
......@@ -173,14 +173,14 @@ TEST_F(ExplicitManagementTest, ShrinkFreeListBailoutAvoidFragmentation) {
const auto* space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
// Force returning to free list by removing the LAB.
ResetLinearAllocationBuffers();
auto& header = HeapObjectHeader::FromPayload(o);
auto& header = HeapObjectHeader::FromObject(o);
constexpr size_t size_of_o = sizeof(DynamicallySized);
EXPECT_TRUE(subtle::Resize(*o, AdditionalBytes(0)));
EXPECT_EQ(RoundUp<kAllocationGranularity>(
size_of_o + ObjectAllocator::kSmallestSpaceSize - 1),
header.ObjectSize());
EXPECT_FALSE(space->free_list().ContainsForTesting(
{header.Payload() + RoundUp<kAllocationGranularity>(size_of_o),
{header.ObjectStart() + RoundUp<kAllocationGranularity>(size_of_o),
ObjectAllocator::kSmallestSpaceSize - 1}));
}
......
......@@ -135,7 +135,7 @@ struct PostConstructionCallbackTrait<
internal::GCedWithPostConstructionCallback> {
static void Call(internal::GCedWithPostConstructionCallback* object) {
EXPECT_FALSE(
internal::HeapObjectHeader::FromPayload(object).IsInConstruction());
internal::HeapObjectHeader::FromObject(object).IsInConstruction());
internal::GCedWithPostConstructionCallback::cb_callcount++;
}
};
......@@ -148,7 +148,7 @@ struct PostConstructionCallbackTrait<
static void Call(
internal::GCedWithMixinWithPostConstructionCallback* object) {
EXPECT_FALSE(
internal::HeapObjectHeader::FromPayload(object).IsInConstruction());
internal::HeapObjectHeader::FromObject(object).IsInConstruction());
internal::GCedWithMixinWithPostConstructionCallback::cb_callcount++;
}
};
......@@ -183,10 +183,10 @@ class CheckObjectInConstructionBeforeInitializerList final
public:
CheckObjectInConstructionBeforeInitializerList()
: in_construction_before_initializer_list_(
HeapObjectHeader::FromPayload(this).IsInConstruction()),
HeapObjectHeader::FromObject(this).IsInConstruction()),
unused_int_(GetDummyValue()) {
EXPECT_TRUE(in_construction_before_initializer_list_);
EXPECT_TRUE(HeapObjectHeader::FromPayload(this).IsInConstruction());
EXPECT_TRUE(HeapObjectHeader::FromObject(this).IsInConstruction());
}
void Trace(Visitor*) const {}
......@@ -201,11 +201,10 @@ class CheckMixinInConstructionBeforeInitializerList
public:
explicit CheckMixinInConstructionBeforeInitializerList(void* payload_start)
: in_construction_before_initializer_list_(
HeapObjectHeader::FromPayload(payload_start).IsInConstruction()),
HeapObjectHeader::FromObject(payload_start).IsInConstruction()),
unused_int_(GetDummyValue()) {
EXPECT_TRUE(in_construction_before_initializer_list_);
EXPECT_TRUE(
HeapObjectHeader::FromPayload(payload_start).IsInConstruction());
EXPECT_TRUE(HeapObjectHeader::FromObject(payload_start).IsInConstruction());
}
void Trace(Visitor*) const override {}
......
......@@ -21,7 +21,7 @@ TEST(HeapObjectHeaderTest, Constructor) {
constexpr GCInfoIndex kGCInfoIndex = 17;
constexpr size_t kSize = kAllocationGranularity;
HeapObjectHeader header(kSize, kGCInfoIndex);
EXPECT_EQ(kSize, header.GetSize());
EXPECT_EQ(kSize, header.AllocatedSize());
EXPECT_EQ(kGCInfoIndex, header.GetGCInfoIndex());
EXPECT_TRUE(header.IsInConstruction());
EXPECT_FALSE(header.IsMarked());
......@@ -32,7 +32,7 @@ TEST(HeapObjectHeaderTest, Payload) {
constexpr size_t kSize = kAllocationGranularity;
HeapObjectHeader header(kSize, kGCInfoIndex);
EXPECT_EQ(reinterpret_cast<ConstAddress>(&header) + sizeof(HeapObjectHeader),
header.Payload());
header.ObjectStart());
}
TEST(HeapObjectHeaderTest, PayloadEnd) {
......@@ -40,7 +40,7 @@ TEST(HeapObjectHeaderTest, PayloadEnd) {
constexpr size_t kSize = kAllocationGranularity;
HeapObjectHeader header(kSize, kGCInfoIndex);
EXPECT_EQ(reinterpret_cast<ConstAddress>(&header) + kSize,
header.PayloadEnd());
header.ObjectEnd());
}
TEST(HeapObjectHeaderTest, GetGCInfoIndex) {
......@@ -51,12 +51,12 @@ TEST(HeapObjectHeaderTest, GetGCInfoIndex) {
EXPECT_EQ(kGCInfoIndex, header.GetGCInfoIndex<AccessMode::kAtomic>());
}
TEST(HeapObjectHeaderTest, GetSize) {
TEST(HeapObjectHeaderTest, AllocatedSize) {
constexpr GCInfoIndex kGCInfoIndex = 17;
constexpr size_t kSize = kAllocationGranularity * 23;
HeapObjectHeader header(kSize, kGCInfoIndex);
EXPECT_EQ(kSize, header.GetSize());
EXPECT_EQ(kSize, header.GetSize<AccessMode::kAtomic>());
EXPECT_EQ(kSize, header.AllocatedSize());
EXPECT_EQ(kSize, header.AllocatedSize<AccessMode::kAtomic>());
}
TEST(HeapObjectHeaderTest, IsLargeObject) {
......@@ -79,7 +79,7 @@ TEST(HeapObjectHeaderTest, MarkObjectAsFullyConstructed) {
EXPECT_FALSE(header.IsInConstruction());
// Size shares the same bitfield and should be unaffected by
// MarkObjectAsFullyConstructed.
EXPECT_EQ(kSize, header.GetSize());
EXPECT_EQ(kSize, header.AllocatedSize());
}
TEST(HeapObjectHeaderTest, TryMark) {
......@@ -156,10 +156,10 @@ TEST(HeapObjectHeaderTest, ConstructionBitProtectsNonAtomicWrites) {
~kAllocationMask;
typename std::aligned_storage<kSize, kAllocationGranularity>::type data;
HeapObjectHeader* header = new (&data) HeapObjectHeader(kSize, 1);
ConcurrentGCThread gc_thread(header,
reinterpret_cast<Payload*>(header->Payload()));
ConcurrentGCThread gc_thread(
header, reinterpret_cast<Payload*>(header->ObjectStart()));
CHECK(gc_thread.Start());
new (header->Payload()) Payload();
new (header->ObjectStart()) Payload();
header->MarkAsFullyConstructed();
gc_thread.Join();
}
......
......@@ -161,8 +161,8 @@ TEST_F(PageTest, HeapObjectHeaderOnBasePageIndexing) {
size_t num = 0;
for (const HeapObjectHeader& header : *page) {
EXPECT_EQ(reinterpret_cast<Address>(persistents[num].Get()),
header.Payload());
size += header.GetSize();
header.ObjectStart());
size += header.AllocatedSize();
++num;
}
EXPECT_EQ(num, persistents.size());
......@@ -180,7 +180,7 @@ TEST_F(PageTest, HeapObjectHeaderOnLargePageIndexing) {
EXPECT_EQ(expected_payload_size, page->PayloadSize());
const HeapObjectHeader* header = page->ObjectHeader();
EXPECT_EQ(reinterpret_cast<Address>(gced), header->Payload());
EXPECT_EQ(reinterpret_cast<Address>(gced), header->ObjectStart());
}
TEST_F(PageTest, NormalPageCreationDestruction) {
......@@ -253,7 +253,7 @@ TEST_F(PageTest, UnsweptPageDestruction) {
TEST_F(PageTest, ObjectHeaderFromInnerAddress) {
{
auto* object = MakeGarbageCollected<GCed<64>>(GetAllocationHandle());
const HeapObjectHeader& expected = HeapObjectHeader::FromPayload(object);
const HeapObjectHeader& expected = HeapObjectHeader::FromObject(object);
for (auto* inner_ptr = reinterpret_cast<ConstAddress>(object);
inner_ptr < reinterpret_cast<ConstAddress>(object + 1); ++inner_ptr) {
......@@ -266,7 +266,7 @@ TEST_F(PageTest, ObjectHeaderFromInnerAddress) {
{
auto* object = MakeGarbageCollected<GCed<2 * kLargeObjectSizeThreshold>>(
GetAllocationHandle());
const HeapObjectHeader& expected = HeapObjectHeader::FromPayload(object);
const HeapObjectHeader& expected = HeapObjectHeader::FromObject(object);
const HeapObjectHeader& hoh =
BasePage::FromPayload(object)->ObjectHeaderFromInnerAddress(
......
......@@ -171,20 +171,20 @@ TEST_F(GCHeapTest, AllocateWithAdditionalBytes) {
static constexpr size_t kAdditionalBytes = 10u * kAllocationGranularity;
{
Foo* object = MakeGarbageCollected<Foo>(GetAllocationHandle());
EXPECT_LE(kBaseSize, HeapObjectHeader::FromPayload(object).GetSize());
EXPECT_LE(kBaseSize, HeapObjectHeader::FromObject(object).AllocatedSize());
}
{
Foo* object = MakeGarbageCollected<Foo>(GetAllocationHandle(),
AdditionalBytes(kAdditionalBytes));
EXPECT_LE(kBaseSize + kAdditionalBytes,
HeapObjectHeader::FromPayload(object).GetSize());
HeapObjectHeader::FromObject(object).AllocatedSize());
}
{
Foo* object = MakeGarbageCollected<Foo>(
GetAllocationHandle(),
AdditionalBytes(kAdditionalBytes * kAdditionalBytes));
EXPECT_LE(kBaseSize + kAdditionalBytes * kAdditionalBytes,
HeapObjectHeader::FromPayload(object).GetSize());
HeapObjectHeader::FromObject(object).AllocatedSize());
}
}
......@@ -196,10 +196,11 @@ TEST_F(GCHeapTest, AllocatedSizeDependOnAdditionalBytes) {
Foo* object_with_more_bytes = MakeGarbageCollected<Foo>(
GetAllocationHandle(),
AdditionalBytes(kAdditionalBytes * kAdditionalBytes));
EXPECT_LT(HeapObjectHeader::FromPayload(object).GetSize(),
HeapObjectHeader::FromPayload(object_with_bytes).GetSize());
EXPECT_LT(HeapObjectHeader::FromPayload(object_with_bytes).GetSize(),
HeapObjectHeader::FromPayload(object_with_more_bytes).GetSize());
EXPECT_LT(HeapObjectHeader::FromObject(object).AllocatedSize(),
HeapObjectHeader::FromObject(object_with_bytes).AllocatedSize());
EXPECT_LT(
HeapObjectHeader::FromObject(object_with_bytes).AllocatedSize(),
HeapObjectHeader::FromObject(object_with_more_bytes).AllocatedSize());
}
TEST_F(GCHeapTest, Epoch) {
......@@ -369,7 +370,7 @@ TEST_F(GCHeapDeathTest, LargeChainOfNewStates) {
TEST_F(GCHeapTest, IsHeapObjectAliveForConstPointer) {
// Regression test: http://crbug.com/661363.
GCed<64>* object = MakeGarbageCollected<GCed<64>>(GetAllocationHandle());
HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
LivenessBroker broker = internal::LivenessBrokerFactory::Create();
EXPECT_TRUE(header.TryMarkAtomic());
EXPECT_TRUE(broker.IsHeapObjectAlive(object));
......
......@@ -73,7 +73,7 @@ V8_NOINLINE T access(volatile const T& t) {
TEST_F(MarkerTest, PersistentIsMarked) {
Persistent<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle());
HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
EXPECT_FALSE(header.IsMarked());
DoMarking(MarkingConfig::StackState::kNoHeapPointers);
EXPECT_TRUE(header.IsMarked());
......@@ -82,7 +82,7 @@ TEST_F(MarkerTest, PersistentIsMarked) {
TEST_F(MarkerTest, ReachableMemberIsMarked) {
Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle());
parent->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
HeapObjectHeader& header = HeapObjectHeader::FromPayload(parent->child());
HeapObjectHeader& header = HeapObjectHeader::FromObject(parent->child());
EXPECT_FALSE(header.IsMarked());
DoMarking(MarkingConfig::StackState::kNoHeapPointers);
EXPECT_TRUE(header.IsMarked());
......@@ -90,7 +90,7 @@ TEST_F(MarkerTest, ReachableMemberIsMarked) {
TEST_F(MarkerTest, UnreachableMemberIsNotMarked) {
Member<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle());
HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
EXPECT_FALSE(header.IsMarked());
DoMarking(MarkingConfig::StackState::kNoHeapPointers);
EXPECT_FALSE(header.IsMarked());
......@@ -98,15 +98,15 @@ TEST_F(MarkerTest, UnreachableMemberIsNotMarked) {
TEST_F(MarkerTest, ObjectReachableFromStackIsMarked) {
GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle());
EXPECT_FALSE(HeapObjectHeader::FromPayload(object).IsMarked());
EXPECT_FALSE(HeapObjectHeader::FromObject(object).IsMarked());
DoMarking(MarkingConfig::StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromPayload(object).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(object).IsMarked());
access(object);
}
TEST_F(MarkerTest, ObjectReachableOnlyFromStackIsNotMarkedIfStackIsEmpty) {
GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle());
HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
EXPECT_FALSE(header.IsMarked());
DoMarking(MarkingConfig::StackState::kNoHeapPointers);
EXPECT_FALSE(header.IsMarked());
......@@ -195,10 +195,10 @@ TEST_F(MarkerTest, DeepHierarchyIsMarked) {
parent = parent->child();
}
DoMarking(MarkingConfig::StackState::kNoHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromPayload(root).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(root).IsMarked());
parent = root;
for (int i = 0; i < kHierarchyDepth; ++i) {
EXPECT_TRUE(HeapObjectHeader::FromPayload(parent->child()).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(parent->child()).IsMarked());
EXPECT_TRUE(parent->weak_child());
parent = parent->child();
}
......@@ -209,9 +209,9 @@ TEST_F(MarkerTest, NestedObjectsOnStackAreMarked) {
root->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
root->child()->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
DoMarking(MarkingConfig::StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromPayload(root).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromPayload(root->child()).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromPayload(root->child()->child()).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(root).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(root->child()).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(root->child()->child()).IsMarked());
}
namespace {
......@@ -248,9 +248,9 @@ TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) {
Member<GCedWithCallback> member(obj);
marker->Visitor().Trace(member);
});
EXPECT_FALSE(HeapObjectHeader::FromPayload(object).IsMarked());
EXPECT_FALSE(HeapObjectHeader::FromObject(object).IsMarked());
marker()->FinishMarking(MarkingConfig::StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromPayload(object).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(object).IsMarked());
}
TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedNonEmptyStack) {
......@@ -262,10 +262,10 @@ TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedNonEmptyStack) {
GetAllocationHandle(), [marker = marker()](GCedWithCallback* obj) {
Member<GCedWithCallback> member(obj);
marker->Visitor().Trace(member);
EXPECT_FALSE(HeapObjectHeader::FromPayload(obj).IsMarked());
EXPECT_FALSE(HeapObjectHeader::FromObject(obj).IsMarked());
marker->FinishMarking(
MarkingConfig::StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromPayload(obj).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(obj).IsMarked());
});
}
......@@ -301,7 +301,7 @@ V8_NOINLINE void RegisterInConstructionObject(
Member<GCedWithCallback> member(obj);
// Adds GCedWithCallback to in-construction objects.
visitor.Trace(member);
EXPECT_FALSE(HeapObjectHeader::FromPayload(obj).IsMarked());
EXPECT_FALSE(HeapObjectHeader::FromObject(obj).IsMarked());
// The inner object GCed is only found if GCedWithCallback is processed.
storage.set_object(obj->gced());
},
......@@ -321,9 +321,9 @@ TEST_F(MarkerTest,
GCObliviousObjectStorage storage;
RegisterInConstructionObject(GetAllocationHandle(), marker()->Visitor(),
storage);
EXPECT_FALSE(HeapObjectHeader::FromPayload(storage.object()).IsMarked());
EXPECT_FALSE(HeapObjectHeader::FromObject(storage.object()).IsMarked());
marker()->FinishMarking(MarkingConfig::StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromPayload(storage.object()).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(storage.object()).IsMarked());
}
TEST_F(MarkerTest, SentinelNotClearedOnWeakPersistentHandling) {
......@@ -340,7 +340,7 @@ TEST_F(MarkerTest, SentinelNotClearedOnWeakPersistentHandling) {
}
// {root} object must be marked at this point because we do not allow
// encountering kSentinelPointer in WeakMember on regular Trace() calls.
ASSERT_TRUE(HeapObjectHeader::FromPayload(root.Get()).IsMarked());
ASSERT_TRUE(HeapObjectHeader::FromObject(root.Get()).IsMarked());
root->SetWeakChild(kSentinelPointer);
marker()->FinishMarking(MarkingConfig::StackState::kNoHeapPointers);
EXPECT_EQ(kSentinelPointer, root->weak_child());
......@@ -390,17 +390,17 @@ constexpr IncrementalMarkingTest::MarkingConfig
TEST_F(IncrementalMarkingTest, RootIsMarkedAfterMarkingStarted) {
Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
EXPECT_FALSE(HeapObjectHeader::FromPayload(root).IsMarked());
EXPECT_FALSE(HeapObjectHeader::FromObject(root).IsMarked());
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
IncrementalPreciseMarkingConfig);
EXPECT_TRUE(HeapObjectHeader::FromPayload(root).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(root).IsMarked());
FinishMarking();
}
TEST_F(IncrementalMarkingTest, MemberIsMarkedAfterMarkingSteps) {
Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
root->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
HeapObjectHeader& header = HeapObjectHeader::FromPayload(root->child());
HeapObjectHeader& header = HeapObjectHeader::FromObject(root->child());
EXPECT_FALSE(header.IsMarked());
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
IncrementalPreciseMarkingConfig);
......@@ -416,7 +416,7 @@ TEST_F(IncrementalMarkingTest,
IncrementalPreciseMarkingConfig);
root->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
FinishSteps(MarkingConfig::StackState::kNoHeapPointers);
HeapObjectHeader& header = HeapObjectHeader::FromPayload(root->child());
HeapObjectHeader& header = HeapObjectHeader::FromObject(root->child());
EXPECT_TRUE(header.IsMarked());
FinishMarking();
}
......@@ -438,7 +438,7 @@ TEST_F(IncrementalMarkingTest, IncrementalStepDuringAllocation) {
const HeapObjectHeader* header;
MakeGarbageCollected<GCedWithCallback>(
GetAllocationHandle(), [this, &holder, &header](GCedWithCallback* obj) {
header = &HeapObjectHeader::FromPayload(obj);
header = &HeapObjectHeader::FromObject(obj);
holder->member_ = obj;
EXPECT_FALSE(header->IsMarked());
FinishSteps(MarkingConfig::StackState::kMayContainHeapPointers);
......
......@@ -56,7 +56,7 @@ V8_NOINLINE T access(volatile const T& t) {
TEST_F(MarkingVerifierTest, DoesntDieOnMarkedOnStackReference) {
GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle());
HeapObjectHeader::FromPayload(object).TryMarkAtomic();
HeapObjectHeader::FromObject(object).TryMarkAtomic();
VerifyMarking(Heap::From(GetHeap())->AsBase(),
StackState::kMayContainHeapPointers);
access(object);
......@@ -64,17 +64,17 @@ TEST_F(MarkingVerifierTest, DoesntDieOnMarkedOnStackReference) {
TEST_F(MarkingVerifierTest, DoesntDieOnMarkedMember) {
Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle());
HeapObjectHeader::FromPayload(parent.Get()).TryMarkAtomic();
HeapObjectHeader::FromObject(parent.Get()).TryMarkAtomic();
parent->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
HeapObjectHeader::FromPayload(parent->child()).TryMarkAtomic();
HeapObjectHeader::FromObject(parent->child()).TryMarkAtomic();
VerifyMarking(Heap::From(GetHeap())->AsBase(), StackState::kNoHeapPointers);
}
TEST_F(MarkingVerifierTest, DoesntDieOnMarkedWeakMember) {
Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle());
HeapObjectHeader::FromPayload(parent.Get()).TryMarkAtomic();
HeapObjectHeader::FromObject(parent.Get()).TryMarkAtomic();
parent->SetWeakChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
HeapObjectHeader::FromPayload(parent->weak_child()).TryMarkAtomic();
HeapObjectHeader::FromObject(parent->weak_child()).TryMarkAtomic();
VerifyMarking(Heap::From(GetHeap())->AsBase(), StackState::kNoHeapPointers);
}
......@@ -94,7 +94,7 @@ class GCedWithCallback : public GarbageCollected<GCedWithCallback> {
TEST_F(MarkingVerifierTest, DoesntDieOnInConstructionOnObject) {
MakeGarbageCollected<GCedWithCallback>(
GetAllocationHandle(), [this](GCedWithCallback* obj) {
HeapObjectHeader::FromPayload(obj).TryMarkAtomic();
HeapObjectHeader::FromObject(obj).TryMarkAtomic();
VerifyMarking(Heap::From(GetHeap())->AsBase(),
StackState::kMayContainHeapPointers);
});
......@@ -164,7 +164,7 @@ TEST_F(MarkingVerifierDeathTest, DieOnUnmarkedOnStackReference) {
TEST_F(MarkingVerifierDeathTest, DieOnUnmarkedMember) {
Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle());
HeapObjectHeader::FromPayload(parent.Get()).TryMarkAtomic();
HeapObjectHeader::FromObject(parent.Get()).TryMarkAtomic();
parent->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
EXPECT_DEATH_IF_SUPPORTED(VerifyMarking(Heap::From(GetHeap())->AsBase(),
StackState::kNoHeapPointers),
......@@ -173,7 +173,7 @@ TEST_F(MarkingVerifierDeathTest, DieOnUnmarkedMember) {
TEST_F(MarkingVerifierDeathTest, DieOnUnmarkedWeakMember) {
Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle());
HeapObjectHeader::FromPayload(parent.Get()).TryMarkAtomic();
HeapObjectHeader::FromObject(parent.Get()).TryMarkAtomic();
parent->SetWeakChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
EXPECT_DEATH_IF_SUPPORTED(VerifyMarking(Heap::From(GetHeap())->AsBase(),
StackState::kNoHeapPointers),
......
......@@ -64,7 +64,7 @@ TEST_F(MarkingVisitorTest, MarkedBytesAreInitiallyZero) {
TEST_F(MarkingVisitorTest, MarkMember) {
Member<GCed> object(MakeGarbageCollected<GCed>(GetAllocationHandle()));
HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
TestMarkingVisitor visitor(GetMarker());
......@@ -79,7 +79,7 @@ TEST_F(MarkingVisitorTest, MarkMemberMixin) {
GCedWithMixin* object(
MakeGarbageCollected<GCedWithMixin>(GetAllocationHandle()));
Member<Mixin> mixin(object);
HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
TestMarkingVisitor visitor(GetMarker());
......@@ -92,7 +92,7 @@ TEST_F(MarkingVisitorTest, MarkMemberMixin) {
TEST_F(MarkingVisitorTest, MarkPersistent) {
Persistent<GCed> object(MakeGarbageCollected<GCed>(GetAllocationHandle()));
HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
TestMarkingVisitor visitor(GetMarker());
......@@ -107,7 +107,7 @@ TEST_F(MarkingVisitorTest, MarkPersistentMixin) {
GCedWithMixin* object(
MakeGarbageCollected<GCedWithMixin>(GetAllocationHandle()));
Persistent<Mixin> mixin(object);
HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
TestMarkingVisitor visitor(GetMarker());
......@@ -122,7 +122,7 @@ TEST_F(MarkingVisitorTest, MarkPersistentMixin) {
TEST_F(MarkingVisitorTest, DontMarkWeakMember) {
WeakMember<GCed> object(MakeGarbageCollected<GCed>(GetAllocationHandle()));
HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
TestMarkingVisitor visitor(GetMarker());
......@@ -137,7 +137,7 @@ TEST_F(MarkingVisitorTest, DontMarkWeakMemberMixin) {
GCedWithMixin* object(
MakeGarbageCollected<GCedWithMixin>(GetAllocationHandle()));
WeakMember<Mixin> mixin(object);
HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
TestMarkingVisitor visitor(GetMarker());
......@@ -151,7 +151,7 @@ TEST_F(MarkingVisitorTest, DontMarkWeakMemberMixin) {
TEST_F(MarkingVisitorTest, DontMarkWeakPersistent) {
WeakPersistent<GCed> object(
MakeGarbageCollected<GCed>(GetAllocationHandle()));
HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
TestMarkingVisitor visitor(GetMarker());
......@@ -166,7 +166,7 @@ TEST_F(MarkingVisitorTest, DontMarkWeakPersistentMixin) {
GCedWithMixin* object(
MakeGarbageCollected<GCedWithMixin>(GetAllocationHandle()));
WeakPersistent<Mixin> mixin(object);
HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
TestMarkingVisitor visitor(GetMarker());
......@@ -219,7 +219,7 @@ TEST_F(MarkingVisitorTest, MarkMemberInConstruction) {
Member<GCedWithInConstructionCallback> object(obj);
visitor.Trace(object);
});
HeapObjectHeader& header = HeapObjectHeader::FromPayload(gced);
HeapObjectHeader& header = HeapObjectHeader::FromObject(gced);
EXPECT_TRUE(visitor.marking_state().not_fully_constructed_worklist().Contains(
&header));
EXPECT_FALSE(header.IsMarked());
......@@ -234,7 +234,7 @@ TEST_F(MarkingVisitorTest, MarkMemberMixinInConstruction) {
Member<MixinWithInConstructionCallback> mixin(obj);
visitor.Trace(mixin);
});
HeapObjectHeader& header = HeapObjectHeader::FromPayload(gced);
HeapObjectHeader& header = HeapObjectHeader::FromObject(gced);
EXPECT_TRUE(visitor.marking_state().not_fully_constructed_worklist().Contains(
&header));
EXPECT_FALSE(header.IsMarked());
......@@ -249,7 +249,7 @@ TEST_F(MarkingVisitorTest, DontMarkWeakMemberInConstruction) {
WeakMember<GCedWithInConstructionCallback> object(obj);
visitor.Trace(object);
});
HeapObjectHeader& header = HeapObjectHeader::FromPayload(gced);
HeapObjectHeader& header = HeapObjectHeader::FromObject(gced);
EXPECT_FALSE(
visitor.marking_state().not_fully_constructed_worklist().Contains(
&header));
......@@ -265,7 +265,7 @@ TEST_F(MarkingVisitorTest, DontMarkWeakMemberMixinInConstruction) {
WeakMember<MixinWithInConstructionCallback> mixin(obj);
visitor.Trace(mixin);
});
HeapObjectHeader& header = HeapObjectHeader::FromPayload(gced);
HeapObjectHeader& header = HeapObjectHeader::FromObject(gced);
EXPECT_FALSE(
visitor.marking_state().not_fully_constructed_worklist().Contains(
&header));
......@@ -281,7 +281,7 @@ TEST_F(MarkingVisitorTest, MarkPersistentInConstruction) {
Persistent<GCedWithInConstructionCallback> object(obj);
visitor.TraceRootForTesting(object, SourceLocation::Current());
});
HeapObjectHeader& header = HeapObjectHeader::FromPayload(gced);
HeapObjectHeader& header = HeapObjectHeader::FromObject(gced);
EXPECT_TRUE(visitor.marking_state().not_fully_constructed_worklist().Contains(
&header));
EXPECT_FALSE(header.IsMarked());
......@@ -296,7 +296,7 @@ TEST_F(MarkingVisitorTest, MarkPersistentMixinInConstruction) {
Persistent<MixinWithInConstructionCallback> mixin(obj);
visitor.TraceRootForTesting(mixin, SourceLocation::Current());
});
HeapObjectHeader& header = HeapObjectHeader::FromPayload(gced);
HeapObjectHeader& header = HeapObjectHeader::FromObject(gced);
EXPECT_TRUE(visitor.marking_state().not_fully_constructed_worklist().Contains(
&header));
EXPECT_FALSE(header.IsMarked());
......@@ -304,7 +304,7 @@ TEST_F(MarkingVisitorTest, MarkPersistentMixinInConstruction) {
TEST_F(MarkingVisitorTest, StrongTracingMarksWeakMember) {
WeakMember<GCed> object(MakeGarbageCollected<GCed>(GetAllocationHandle()));
HeapObjectHeader& header = HeapObjectHeader::FromPayload(object);
HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
TestMarkingVisitor visitor(GetMarker());
......
......@@ -108,9 +108,9 @@ TYPED_TEST(MinorGCTestForType, StickyBits) {
Persistent<Type> p1 = MakeGarbageCollected<Type>(this->GetAllocationHandle());
TestFixture::CollectMinor();
EXPECT_FALSE(HeapObjectHeader::FromPayload(p1.Get()).IsYoung());
EXPECT_FALSE(HeapObjectHeader::FromObject(p1.Get()).IsYoung());
TestFixture::CollectMajor();
EXPECT_FALSE(HeapObjectHeader::FromPayload(p1.Get()).IsYoung());
EXPECT_FALSE(HeapObjectHeader::FromObject(p1.Get()).IsYoung());
EXPECT_EQ(0u, TestFixture::DestructedObjects());
}
......@@ -120,14 +120,14 @@ TYPED_TEST(MinorGCTestForType, OldObjectIsNotVisited) {
Persistent<Type> p = MakeGarbageCollected<Type>(this->GetAllocationHandle());
TestFixture::CollectMinor();
EXPECT_EQ(0u, TestFixture::DestructedObjects());
EXPECT_FALSE(HeapObjectHeader::FromPayload(p.Get()).IsYoung());
EXPECT_FALSE(HeapObjectHeader::FromObject(p.Get()).IsYoung());
// Check that the old deleted object won't be visited during minor GC.
Type* raw = p.Release();
TestFixture::CollectMinor();
EXPECT_EQ(0u, TestFixture::DestructedObjects());
EXPECT_FALSE(HeapObjectHeader::FromPayload(raw).IsYoung());
EXPECT_FALSE(HeapObjectHeader::FromPayload(raw).IsFree());
EXPECT_FALSE(HeapObjectHeader::FromObject(raw).IsYoung());
EXPECT_FALSE(HeapObjectHeader::FromObject(raw).IsFree());
// Check that the old deleted object will be revisited in major GC.
TestFixture::CollectMajor();
......@@ -139,7 +139,7 @@ void InterGenerationalPointerTest(MinorGCTest* test, cppgc::Heap* heap) {
Persistent<Type1> old =
MakeGarbageCollected<Type1>(heap->GetAllocationHandle());
test->CollectMinor();
EXPECT_FALSE(HeapObjectHeader::FromPayload(old.Get()).IsYoung());
EXPECT_FALSE(HeapObjectHeader::FromObject(old.Get()).IsYoung());
Type2* young = nullptr;
......@@ -151,7 +151,7 @@ void InterGenerationalPointerTest(MinorGCTest* test, cppgc::Heap* heap) {
auto* ptr = MakeGarbageCollected<Type2>(heap->GetAllocationHandle());
ptr->next = young;
young = ptr;
EXPECT_TRUE(HeapObjectHeader::FromPayload(young).IsYoung());
EXPECT_TRUE(HeapObjectHeader::FromObject(young).IsYoung());
}
}
......@@ -170,8 +170,8 @@ void InterGenerationalPointerTest(MinorGCTest* test, cppgc::Heap* heap) {
EXPECT_TRUE(set.empty());
for (size_t i = 0; i < 64; ++i) {
EXPECT_FALSE(HeapObjectHeader::FromPayload(young).IsFree());
EXPECT_FALSE(HeapObjectHeader::FromPayload(young).IsYoung());
EXPECT_FALSE(HeapObjectHeader::FromObject(young).IsFree());
EXPECT_FALSE(HeapObjectHeader::FromObject(young).IsYoung());
young = static_cast<Type2*>(young->next.Get());
}
......@@ -217,7 +217,7 @@ TYPED_TEST(MinorGCTestForType, OmitGenerationalBarrierForSentinels) {
MakeGarbageCollected<Type>(this->GetAllocationHandle());
TestFixture::CollectMinor();
EXPECT_FALSE(HeapObjectHeader::FromPayload(old.Get()).IsYoung());
EXPECT_FALSE(HeapObjectHeader::FromObject(old.Get()).IsYoung());
const auto& set = Heap::From(this->GetHeap())->remembered_slots();
const size_t set_size_before_barrier = set.size();
......
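The IsYoung() assertions in these minor-GC tests all encode the same invariant: an object is young only until the first garbage collection that processes it, minor or major, and stays old afterwards. A toy sticky-bit model of that lifecycle (a sketch of the invariant the tests check, not cppgc's actual generational implementation):

#include <cassert>

// Toy model: objects start young; any completed GC promotes them to old,
// and a minor GC only scans objects still flagged young.
struct Header {
  bool is_young = true;
  bool IsYoung() const { return is_young; }
  void OnSurvivedGC() { is_young = false; }  // minor or major
};

int main() {
  Header h;
  assert(h.IsYoung());   // freshly allocated
  h.OnSurvivedGC();      // first collection sees it
  assert(!h.IsYoung());  // now old: skipped by later minor GCs
  return 0;
}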
......@@ -98,7 +98,7 @@ class HeapObjectHeaderNameTest : public testing::TestWithHeap {};
TEST_F(HeapObjectHeaderNameTest, LookupNameThroughGCInfo) {
auto* no_name = MakeGarbageCollected<NoName>(GetAllocationHandle());
auto no_name_tuple = HeapObjectHeader::FromPayload(no_name).GetName();
auto no_name_tuple = HeapObjectHeader::FromObject(no_name).GetName();
if (NameProvider::HideInternalNames()) {
EXPECT_STREQ(NameProvider::kHiddenName, no_name_tuple.value);
EXPECT_TRUE(no_name_tuple.name_was_hidden);
......@@ -111,7 +111,7 @@ TEST_F(HeapObjectHeaderNameTest, LookupNameThroughGCInfo) {
auto* other_no_name =
MakeGarbageCollected<OtherNoName>(GetAllocationHandle());
auto other_no_name_tuple =
HeapObjectHeader::FromPayload(other_no_name).GetName();
HeapObjectHeader::FromObject(other_no_name).GetName();
if (NameProvider::HideInternalNames()) {
EXPECT_STREQ(NameProvider::kHiddenName, no_name_tuple.value);
EXPECT_TRUE(no_name_tuple.name_was_hidden);
......@@ -124,7 +124,7 @@ TEST_F(HeapObjectHeaderNameTest, LookupNameThroughGCInfo) {
auto* class_with_name =
MakeGarbageCollected<ClassWithName>(GetAllocationHandle(), "CustomName");
auto class_with_name_tuple =
HeapObjectHeader::FromPayload(class_with_name).GetName();
HeapObjectHeader::FromObject(class_with_name).GetName();
EXPECT_STREQ("CustomName", class_with_name_tuple.value);
EXPECT_FALSE(class_with_name_tuple.name_was_hidden);
}
......
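The name lookup exercised by this test comes from cppgc::NameProvider: a garbage-collected class can supply a human-readable heap-snapshot name, and GetName() otherwise falls back to NameProvider::kHiddenName when internal names are hidden. A brief sketch, assuming the public cppgc/name-provider.h interface (NamedNode is a hypothetical class, and heap setup is elided):

#include "cppgc/garbage-collected.h"
#include "cppgc/name-provider.h"
#include "cppgc/visitor.h"

// Hypothetical class that opts into a readable name for heap tooling;
// HeapObjectHeader::GetName() reports this instead of a hidden name.
class NamedNode final : public cppgc::GarbageCollected<NamedNode>,
                        public cppgc::NameProvider {
 public:
  void Trace(cppgc::Visitor*) const {}
  const char* GetHumanReadableName() const final { return "NamedNode"; }
};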
......@@ -58,7 +58,7 @@ class SweeperTest : public testing::TestWithHeap {
}
void MarkObject(void* payload) {
HeapObjectHeader& header = HeapObjectHeader::FromPayload(payload);
HeapObjectHeader& header = HeapObjectHeader::FromObject(payload);
header.TryMarkAtomic();
}
......@@ -196,10 +196,10 @@ TEST_F(SweeperTest, CoalesceFreeListEntries) {
MarkObject(object4);
Address object2_start =
reinterpret_cast<Address>(&HeapObjectHeader::FromPayload(object2));
reinterpret_cast<Address>(&HeapObjectHeader::FromObject(object2));
Address object3_end =
reinterpret_cast<Address>(&HeapObjectHeader::FromPayload(object3)) +
HeapObjectHeader::FromPayload(object3).GetSize();
reinterpret_cast<Address>(&HeapObjectHeader::FromObject(object3)) +
HeapObjectHeader::FromObject(object3).AllocatedSize();
const BasePage* page = BasePage::FromPayload(object2);
const FreeList& freelist = NormalPageSpace::From(page->space())->free_list();
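object3_end above is derived by adding AllocatedSize() to the header's own address, i.e. the size spanning both the header and the object, matching the commit's naming scheme in which ObjectSize() excludes the header. A compilable sketch of that size relationship (the stored field is invented; the real header encodes its size differently):

#include <cassert>
#include <cstddef>

// Model of the renamed size accessors: ObjectSize() covers the object
// alone, AllocatedSize() additionally covers the header in front of it.
struct Header {
  size_t allocated_size;  // header + object, as recorded at allocation

  size_t AllocatedSize() const { return allocated_size; }
  size_t ObjectSize() const { return allocated_size - sizeof(Header); }
};

int main() {
  const Header h{64};
  // The invariant the free-list coalescing arithmetic above depends on.
  assert(h.AllocatedSize() == sizeof(Header) + h.ObjectSize());
  return 0;
}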
......@@ -249,8 +249,8 @@ TEST_F(SweeperTest, UnmarkObjects) {
MakeGarbageCollected<GCed<kLargeObjectSizeThreshold * 2>>(
GetAllocationHandle());
auto& normal_object_header = HeapObjectHeader::FromPayload(normal_object);
auto& large_object_header = HeapObjectHeader::FromPayload(large_object);
auto& normal_object_header = HeapObjectHeader::FromObject(normal_object);
auto& large_object_header = HeapObjectHeader::FromObject(large_object);
normal_object_header.TryMarkAtomic();
large_object_header.TryMarkAtomic();
......
......@@ -54,7 +54,7 @@ class V8_NODISCARD ExpectWriteBarrierFires final
EXPECT_TRUE(marking_worklist_.IsGlobalEmpty());
EXPECT_TRUE(write_barrier_worklist_.IsGlobalEmpty());
for (void* object : objects) {
headers_.push_back(&HeapObjectHeader::FromPayload(object));
headers_.push_back(&HeapObjectHeader::FromObject(object));
EXPECT_FALSE(headers_.back()->IsMarked());
}
}
......@@ -71,7 +71,8 @@ class V8_NODISCARD ExpectWriteBarrierFires final
{
HeapObjectHeader* item;
while (write_barrier_worklist_.Pop(&item)) {
auto pos = std::find(objects_.begin(), objects_.end(), item->Payload());
auto pos =
std::find(objects_.begin(), objects_.end(), item->ObjectStart());
if (pos != objects_.end()) objects_.erase(pos);
}
}
......@@ -104,7 +105,7 @@ class V8_NODISCARD ExpectNoWriteBarrierFires final
EXPECT_TRUE(marking_worklist_.IsGlobalEmpty());
EXPECT_TRUE(write_barrier_worklist_.IsGlobalEmpty());
for (void* object : objects) {
auto* header = &HeapObjectHeader::FromPayload(object);
auto* header = &HeapObjectHeader::FromObject(object);
headers_.emplace_back(header, header->IsMarked());
}
}
......@@ -131,7 +132,7 @@ class GCed : public GarbageCollected<GCed> {
void Trace(cppgc::Visitor* v) const { v->Trace(next_); }
bool IsMarked() const {
return HeapObjectHeader::FromPayload(this).IsMarked();
return HeapObjectHeader::FromObject(this).IsMarked();
}
void set_next(GCed* next) { next_ = next; }
......@@ -201,7 +202,7 @@ TEST_F(NoWriteBarrierTest, BailoutWhenMarkingIsOff) {
TEST_F(WriteBarrierTest, BailoutIfMarked) {
auto* object1 = MakeGarbageCollected<GCed>(GetAllocationHandle());
auto* object2 = MakeGarbageCollected<GCed>(GetAllocationHandle());
EXPECT_TRUE(HeapObjectHeader::FromPayload(object1).TryMarkAtomic());
EXPECT_TRUE(HeapObjectHeader::FromObject(object1).TryMarkAtomic());
{
ExpectNoWriteBarrierFires scope(marker(), {object1});
object2->set_next(object1);
......@@ -213,7 +214,7 @@ TEST_F(WriteBarrierTest, MemberInitializingStoreNoBarrier) {
{
ExpectNoWriteBarrierFires scope(marker(), {object1});
auto* object2 = MakeGarbageCollected<GCed>(GetAllocationHandle(), object1);
HeapObjectHeader& object2_header = HeapObjectHeader::FromPayload(object2);
HeapObjectHeader& object2_header = HeapObjectHeader::FromObject(object2);
EXPECT_FALSE(object2_header.IsMarked());
}
}
......@@ -313,7 +314,7 @@ TEST_F(WriteBarrierTest, NoWriteBarrierOnMarkedMixinApplication) {
ParentWithMixinPointer* parent =
MakeGarbageCollected<ParentWithMixinPointer>(GetAllocationHandle());
auto* child = MakeGarbageCollected<Child>(GetAllocationHandle());
EXPECT_TRUE(HeapObjectHeader::FromPayload(child).TryMarkAtomic());
EXPECT_TRUE(HeapObjectHeader::FromObject(child).TryMarkAtomic());
Mixin* mixin = static_cast<Mixin*>(child);
EXPECT_NE(static_cast<void*>(child), static_cast<void*>(mixin));
{
......@@ -367,7 +368,7 @@ TEST_F(WriteBarrierTest, DijkstraWriteBarrierTriggersWhenMarkingIsOn) {
TEST_F(WriteBarrierTest, DijkstraWriteBarrierBailoutIfMarked) {
auto* object1 = MakeGarbageCollected<GCed>(GetAllocationHandle());
auto* object2 = MakeGarbageCollected<GCed>(GetAllocationHandle(), object1);
EXPECT_TRUE(HeapObjectHeader::FromPayload(object1).TryMarkAtomic());
EXPECT_TRUE(HeapObjectHeader::FromObject(object1).TryMarkAtomic());
{
ExpectNoWriteBarrierFires scope(marker(), {object1});
WriteBarrierParams params;
......@@ -430,7 +431,7 @@ TEST_F(WriteBarrierTest, DijkstraWriteBarrierRangeBailoutIfMarked) {
auto* object1 = MakeGarbageCollected<GCed>(GetAllocationHandle());
auto* object2 = MakeGarbageCollected<GCedWithInlinedArray>(
GetAllocationHandle(), object1);
EXPECT_TRUE(HeapObjectHeader::FromPayload(object1).TryMarkAtomic());
EXPECT_TRUE(HeapObjectHeader::FromObject(object1).TryMarkAtomic());
{
ExpectNoWriteBarrierFires scope(marker(), {object1});
WriteBarrierParams params;
......@@ -450,7 +451,7 @@ TEST_F(WriteBarrierTest, SteeleWriteBarrierTriggersWhenMarkingIsOn) {
auto* object2 = MakeGarbageCollected<GCed>(GetAllocationHandle(), object1);
{
ExpectWriteBarrierFires scope(marker(), {object1});
EXPECT_TRUE(HeapObjectHeader::FromPayload(object1).TryMarkAtomic());
EXPECT_TRUE(HeapObjectHeader::FromObject(object1).TryMarkAtomic());
WriteBarrierParams params;
EXPECT_EQ(WriteBarrierType::kMarking,
HeapConsistency::GetWriteBarrierType(
......
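All of the barrier bailout tests share one shape: pre-mark the target object, then assert that the barrier does no further work. Condensed into the public cppgc/heap-consistency.h API, the guarded write they exercise looks roughly like this (a sketch; heap setup is elided, and slot/value are placeholders for the member being assigned):

#include "cppgc/heap-consistency.h"

using HeapConsistency = cppgc::subtle::HeapConsistency;

// Sketch: run the Dijkstra write barrier for a member write only while
// marking is active; an already-marked value makes the barrier a no-op.
void AssignMember(const void* slot, const void* value) {
  HeapConsistency::WriteBarrierParams params;
  if (HeapConsistency::GetWriteBarrierType(slot, value, params) ==
      HeapConsistency::WriteBarrierType::kMarking) {
    HeapConsistency::DijkstraWriteBarrier(params, value);
  }
}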