Commit 5e3767f8 authored by Igor Sheludko, committed by Commit Bot

[ptr-compr] Use FullHeapObjectSlot for off-heap HeapObject slots

and a bit of drive-by cleanup.

Bug: v8:8518
Change-Id: I46873f0a5e56509d75f2d169dc7a4372cc94efbc
Reviewed-on: https://chromium-review.googlesource.com/c/1370027
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#58130}
parent f7db2726
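
With pointer compression, tagged slots that live on the V8 heap may hold compressed values, while slots that live outside the heap (roots, stack temporaries used when updating typed slots, and similar) hold full pointer-width values. The commit threads that distinction through the GC by templating the marking and scavenging helpers on the slot type and deriving the matching heap-object slot via the slot's nested THeapObjectSlot alias. The following is a minimal standalone sketch of that pattern; the types are simplified stand-ins, not V8's real slot classes.

#include <cstdint>

using Address = uintptr_t;

struct HeapObjectSlot { Address* location; };      // on-heap, may be compressed
struct FullHeapObjectSlot { Address* location; };  // off-heap, full pointer width

struct ObjectSlot {
  using THeapObjectSlot = HeapObjectSlot;          // compressed family
  Address* location;
};

struct FullObjectSlot {
  using THeapObjectSlot = FullHeapObjectSlot;      // full-width family
  Address* location;
};

// A visitor written once against TSlot works for both families, mirroring
// ConcurrentMarkingVisitor::VisitPointersImpl in the hunks below.
template <typename TSlot>
void VisitPointer(TSlot slot) {
  using THeapObjectSlot = typename TSlot::THeapObjectSlot;
  THeapObjectSlot heap_slot{slot.location};
  (void)heap_slot;  // would be handed to ProcessStrongHeapObject(...) etc.
}

int main() {
  Address storage = 0;
  VisitPointer(ObjectSlot{&storage});      // on-heap field slot
  VisitPointer(FullObjectSlot{&storage});  // off-heap slot, e.g. a root
}
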
......@@ -109,13 +109,15 @@ class ConcurrentMarkingVisitor final
bool AllowDefaultJSObjectVisit() { return false; }
void ProcessStrongHeapObject(HeapObject* host, ObjectSlot slot,
template <typename THeapObjectSlot>
void ProcessStrongHeapObject(HeapObject* host, THeapObjectSlot slot,
HeapObject* heap_object) {
MarkObject(heap_object);
MarkCompactCollector::RecordSlot(host, slot, heap_object);
}
void ProcessWeakHeapObject(HeapObject* host, HeapObjectSlot slot,
template <typename THeapObjectSlot>
void ProcessWeakHeapObject(HeapObject* host, THeapObjectSlot slot,
HeapObject* heap_object) {
#ifdef THREAD_SANITIZER
// Perform a dummy acquire load to tell TSAN that there is no data race
......@@ -149,6 +151,7 @@ class ConcurrentMarkingVisitor final
template <typename TSlot>
V8_INLINE void VisitPointersImpl(HeapObject* host, TSlot start, TSlot end) {
using THeapObjectSlot = typename TSlot::THeapObjectSlot;
for (TSlot slot = start; slot < end; ++slot) {
typename TSlot::TObject object = slot.Relaxed_Load();
HeapObject* heap_object;
......@@ -156,10 +159,10 @@ class ConcurrentMarkingVisitor final
// If the reference changes concurrently from strong to weak, the write
// barrier will treat the weak reference as strong, so we won't miss the
// weak reference.
ProcessStrongHeapObject(host, ObjectSlot(slot), heap_object);
ProcessStrongHeapObject(host, THeapObjectSlot(slot), heap_object);
} else if (TSlot::kCanBeWeak &&
object.GetHeapObjectIfWeak(&heap_object)) {
ProcessWeakHeapObject(host, HeapObjectSlot(slot), heap_object);
ProcessWeakHeapObject(host, THeapObjectSlot(slot), heap_object);
}
}
}
......
......@@ -208,6 +208,9 @@ template <typename TSlot>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitPointerImpl(HeapObject* host,
TSlot slot) {
static_assert(std::is_same<TSlot, ObjectSlot>::value ||
std::is_same<TSlot, MaybeObjectSlot>::value,
"Only ObjectSlot and MaybeObjectSlot are expected here");
typename TSlot::TObject object = slot.load();
HeapObject* target_object;
if (object.GetHeapObjectIfStrong(&target_object)) {
......
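
The newly templated entry points guard themselves with a static_assert over std::is_same, so passing an unexpected slot type fails at compile time instead of silently instantiating. A standalone sketch of the idiom, with placeholder slot types rather than V8's own:

#include <type_traits>

struct HeapObjectSlot {};
struct FullHeapObjectSlot {};

template <typename THeapObjectSlot>
void HandleSlot(THeapObjectSlot slot) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  (void)slot;  // slot-width-agnostic work would go here
}

int main() {
  HandleSlot(HeapObjectSlot{});      // OK
  HandleSlot(FullHeapObjectSlot{});  // OK
  // HandleSlot(42);                 // would trip the static_assert
}
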
......@@ -268,17 +268,47 @@ class RememberedSet : public AllStatic {
class UpdateTypedSlotHelper {
public:
// Updates a typed slot using an untyped slot callback.
// The callback accepts MaybeObjectSlot and returns SlotCallbackResult.
template <typename Callback>
static SlotCallbackResult UpdateTypedSlot(Heap* heap, SlotType slot_type,
Address addr, Callback callback) {
switch (slot_type) {
case CODE_TARGET_SLOT: {
RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, Code());
return UpdateCodeTarget(&rinfo, callback);
}
case CODE_ENTRY_SLOT: {
return UpdateCodeEntry(addr, callback);
}
case EMBEDDED_OBJECT_SLOT: {
RelocInfo rinfo(addr, RelocInfo::EMBEDDED_OBJECT, 0, Code());
return UpdateEmbeddedPointer(heap, &rinfo, callback);
}
case OBJECT_SLOT: {
// TODO(ishell): the incoming addr represents MaybeObjectSlot(addr).
STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
return callback(FullMaybeObjectSlot(addr));
}
case CLEARED_SLOT:
break;
}
UNREACHABLE();
}
private:
// Updates a code entry slot using an untyped slot callback.
// The callback accepts MaybeObjectSlot and returns SlotCallbackResult.
template <typename Callback>
static SlotCallbackResult UpdateCodeEntry(Address entry_address,
Callback callback) {
Object* code = Code::GetObjectFromEntryAddress(entry_address);
Object* old_code = code;
SlotCallbackResult result = callback(MaybeObjectSlot(&code));
DCHECK(!HasWeakHeapObjectTag(code));
Code code = Code::GetObjectFromEntryAddress(entry_address);
Code old_code = code;
STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
SlotCallbackResult result = callback(FullMaybeObjectSlot(&code));
DCHECK(!HasWeakHeapObjectTag(code.ptr()));
if (code != old_code) {
Memory<Address>(entry_address) = Code::cast(code)->entry();
Memory<Address>(entry_address) = code->entry();
}
return result;
}
......@@ -290,9 +320,10 @@ class UpdateTypedSlotHelper {
Callback callback) {
DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode()));
Code old_target = Code::GetCodeFromTargetAddress(rinfo->target_address());
Object* new_target = old_target;
SlotCallbackResult result = callback(MaybeObjectSlot(&new_target));
DCHECK(!HasWeakHeapObjectTag(new_target));
Code new_target = old_target;
STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
SlotCallbackResult result = callback(FullMaybeObjectSlot(&new_target));
DCHECK(!HasWeakHeapObjectTag(new_target.ptr()));
if (new_target != old_target) {
rinfo->set_target_address(
Code::cast(new_target)->raw_instruction_start());
......@@ -307,40 +338,15 @@ class UpdateTypedSlotHelper {
Callback callback) {
DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
HeapObject* old_target = rinfo->target_object();
Object* new_target = old_target;
SlotCallbackResult result = callback(MaybeObjectSlot(&new_target));
HeapObject* new_target = old_target;
STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
SlotCallbackResult result = callback(FullMaybeObjectSlot(&new_target));
DCHECK(!HasWeakHeapObjectTag(new_target));
if (new_target != old_target) {
rinfo->set_target_object(heap, HeapObject::cast(new_target));
}
return result;
}
// Updates a typed slot using an untyped slot callback.
// The callback accepts MaybeObjectSlot and returns SlotCallbackResult.
template <typename Callback>
static SlotCallbackResult UpdateTypedSlot(Heap* heap, SlotType slot_type,
Address addr, Callback callback) {
switch (slot_type) {
case CODE_TARGET_SLOT: {
RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, Code());
return UpdateCodeTarget(&rinfo, callback);
}
case CODE_ENTRY_SLOT: {
return UpdateCodeEntry(addr, callback);
}
case EMBEDDED_OBJECT_SLOT: {
RelocInfo rinfo(addr, RelocInfo::EMBEDDED_OBJECT, 0, Code());
return UpdateEmbeddedPointer(heap, &rinfo, callback);
}
case OBJECT_SLOT: {
return callback(MaybeObjectSlot(addr));
}
case CLEARED_SLOT:
break;
}
UNREACHABLE();
}
};
inline SlotType SlotTypeForRelocInfoMode(RelocInfo::Mode rmode) {
......
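
Typed slots (code targets, embedded objects, code entries) are recorded as raw full-width addresses, so the update helpers above materialize the referenced object in an on-stack variable, wrap that variable in a FullMaybeObjectSlot for the generic callback, and write the result back only if it changed; the STATIC_ASSERT(kTaggedSize == kSystemPointerSize) marks that this shortcut only holds while tagged values are still pointer-sized. A simplified standalone sketch of that read, callback, write-back shape, with placeholder types rather than V8's API:

#include <cstdint>

using Address = uintptr_t;

// Stand-in for FullMaybeObjectSlot: a full pointer-width slot the callback
// may overwrite in place.
struct FullSlot {
  Address* location;
  Address load() const { return *location; }
  void store(Address value) const { *location = value; }
};

enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

// Mirrors the shape of UpdateCodeEntry: decode the stored value into a stack
// temporary, let the callback update it through a full slot, then write back
// only on change.
template <typename Callback>
SlotCallbackResult UpdateEntry(Address* entry_address, Callback callback) {
  Address value = *entry_address;  // decode (simplified)
  Address old_value = value;
  SlotCallbackResult result = callback(FullSlot{&value});
  if (value != old_value) {
    *entry_address = value;        // re-encode and write back
  }
  return result;
}

int main() {
  Address entry = 0x1000;
  UpdateEntry(&entry, [](FullSlot slot) {
    slot.store(slot.load() + 0x10);  // e.g. the object moved during GC
    return KEEP_SLOT;
  });
}
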
......@@ -147,10 +147,14 @@ bool Scavenger::MigrateObject(Map map, HeapObject* source, HeapObject* target,
return true;
}
template <typename THeapObjectSlot>
CopyAndForwardResult Scavenger::SemiSpaceCopyObject(Map map,
HeapObjectSlot slot,
THeapObjectSlot slot,
HeapObject* object,
int object_size) {
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
DCHECK(heap()->AllowedToBeMigrated(object, NEW_SPACE));
AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
AllocationResult allocation =
......@@ -179,9 +183,13 @@ CopyAndForwardResult Scavenger::SemiSpaceCopyObject(Map map,
return CopyAndForwardResult::FAILURE;
}
CopyAndForwardResult Scavenger::PromoteObject(Map map, HeapObjectSlot slot,
template <typename THeapObjectSlot>
CopyAndForwardResult Scavenger::PromoteObject(Map map, THeapObjectSlot slot,
HeapObject* object,
int object_size) {
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
AllocationResult allocation =
allocator_.Allocate(OLD_SPACE, object_size, alignment);
......@@ -239,10 +247,14 @@ bool Scavenger::HandleLargeObject(Map map, HeapObject* object,
return false;
}
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateObjectDefault(Map map,
HeapObjectSlot slot,
THeapObjectSlot slot,
HeapObject* object,
int object_size) {
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
SLOW_DCHECK(object->SizeFromMap(map) == object_size);
CopyAndForwardResult result;
......@@ -280,9 +292,13 @@ SlotCallbackResult Scavenger::EvacuateObjectDefault(Map map,
UNREACHABLE();
}
SlotCallbackResult Scavenger::EvacuateThinString(Map map, HeapObjectSlot slot,
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateThinString(Map map, THeapObjectSlot slot,
ThinString object,
int object_size) {
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
if (!is_incremental_marking_) {
// The ThinString should die after Scavenge, so avoid writing the proper
// forwarding pointer and instead just signal the actual object as forwarded
......@@ -298,10 +314,14 @@ SlotCallbackResult Scavenger::EvacuateThinString(Map map, HeapObjectSlot slot,
return EvacuateObjectDefault(map, slot, object, object_size);
}
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map map,
HeapObjectSlot slot,
THeapObjectSlot slot,
ConsString object,
int object_size) {
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
DCHECK(IsShortcutCandidate(map->instance_type()));
if (!is_incremental_marking_ &&
object->unchecked_second() == ReadOnlyRoots(heap()).empty_string()) {
......@@ -335,8 +355,12 @@ SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map map,
return EvacuateObjectDefault(map, slot, object, object_size);
}
SlotCallbackResult Scavenger::EvacuateObject(HeapObjectSlot slot, Map map,
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateObject(THeapObjectSlot slot, Map map,
HeapObject* source) {
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
SLOW_DCHECK(Heap::InFromSpace(source));
SLOW_DCHECK(!MapWord::FromMap(map).IsForwardingAddress());
int size = source->SizeFromMap(map);
......@@ -358,8 +382,12 @@ SlotCallbackResult Scavenger::EvacuateObject(HeapObjectSlot slot, Map map,
}
}
SlotCallbackResult Scavenger::ScavengeObject(HeapObjectSlot p,
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::ScavengeObject(THeapObjectSlot p,
HeapObject* object) {
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
DCHECK(Heap::InFromSpace(object));
// Synchronized load that consumes the publishing CAS of MigrateObject.
......@@ -391,15 +419,20 @@ SlotCallbackResult Scavenger::ScavengeObject(HeapObjectSlot p,
return EvacuateObject(p, map, object);
}
SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
MaybeObjectSlot slot) {
template <typename TSlot>
SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap, TSlot slot) {
static_assert(
std::is_same<TSlot, FullMaybeObjectSlot>::value ||
std::is_same<TSlot, MaybeObjectSlot>::value,
"Only FullMaybeObjectSlot and MaybeObjectSlot are expected here");
using THeapObjectSlot = typename TSlot::THeapObjectSlot;
MaybeObject object = *slot;
if (Heap::InFromSpace(object)) {
HeapObject* heap_object = object->GetHeapObject();
DCHECK(heap_object->IsHeapObject());
SlotCallbackResult result =
ScavengeObject(HeapObjectSlot(slot), heap_object);
ScavengeObject(THeapObjectSlot(slot), heap_object);
DCHECK_IMPLIES(result == REMOVE_SLOT,
!heap->IsInYoungGeneration((*slot)->GetHeapObject()));
return result;
......
......@@ -98,19 +98,25 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
private:
template <typename TSlot>
V8_INLINE void VisitPointersImpl(HeapObject* host, TSlot start, TSlot end) {
using THeapObjectSlot = typename TSlot::THeapObjectSlot;
// Treat weak references as strong.
// TODO(marja): Proper weakness handling in the young generation.
for (TSlot slot = start; slot < end; ++slot) {
typename TSlot::TObject object = slot.load();
HeapObject* heap_object;
if (object.GetHeapObject(&heap_object)) {
HandleSlot(host, HeapObjectSlot(slot), heap_object);
HandleSlot(host, THeapObjectSlot(slot), heap_object);
}
}
}
V8_INLINE void HandleSlot(HeapObject* host, HeapObjectSlot slot,
template <typename THeapObjectSlot>
V8_INLINE void HandleSlot(HeapObject* host, THeapObjectSlot slot,
HeapObject* target) {
static_assert(
std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
scavenger_->PageMemoryFence(MaybeObject::FromObject(target));
if (Heap::InFromSpace(target)) {
......@@ -383,7 +389,7 @@ void Scavenger::ScavengePage(MemoryChunk* page) {
RememberedSet<OLD_TO_NEW>::IterateTyped(
page, [this](SlotType type, Address host_addr, Address addr) {
return UpdateTypedSlotHelper::UpdateTypedSlot(
heap_, type, addr, [this](MaybeObjectSlot slot) {
heap_, type, addr, [this](FullMaybeObjectSlot slot) {
return CheckAndScavengeObject(heap(), slot);
});
});
......@@ -452,7 +458,7 @@ void RootScavengeVisitor::ScavengePointer(FullObjectSlot p) {
DCHECK(!HasWeakHeapObjectTag(object));
if (!Heap::InNewSpace(object)) return;
scavenger_->ScavengeObject(HeapObjectSlot(p),
scavenger_->ScavengeObject(FullHeapObjectSlot(p),
reinterpret_cast<HeapObject*>(object));
}
......
......@@ -141,12 +141,13 @@ class Scavenger {
// Potentially scavenges an object referenced from |slot| if it is
// indeed a HeapObject and resides in from space.
inline SlotCallbackResult CheckAndScavengeObject(Heap* heap,
MaybeObjectSlot slot);
template <typename TSlot>
inline SlotCallbackResult CheckAndScavengeObject(Heap* heap, TSlot slot);
// Scavenges an object |object| referenced from slot |p|. |object| is required
// to be in from space.
inline SlotCallbackResult ScavengeObject(HeapObjectSlot p,
template <typename THeapObjectSlot>
inline SlotCallbackResult ScavengeObject(THeapObjectSlot p,
HeapObject* object);
// Copies |source| to |target| and sets the forwarding pointer in |source|.
......@@ -156,33 +157,39 @@ class Scavenger {
V8_INLINE SlotCallbackResult
RememberedSetEntryNeeded(CopyAndForwardResult result);
template <typename THeapObjectSlot>
V8_INLINE CopyAndForwardResult SemiSpaceCopyObject(Map map,
HeapObjectSlot slot,
THeapObjectSlot slot,
HeapObject* object,
int object_size);
V8_INLINE CopyAndForwardResult PromoteObject(Map map, HeapObjectSlot slot,
template <typename THeapObjectSlot>
V8_INLINE CopyAndForwardResult PromoteObject(Map map, THeapObjectSlot slot,
HeapObject* object,
int object_size);
V8_INLINE SlotCallbackResult EvacuateObject(HeapObjectSlot slot, Map map,
template <typename THeapObjectSlot>
V8_INLINE SlotCallbackResult EvacuateObject(THeapObjectSlot slot, Map map,
HeapObject* source);
V8_INLINE bool HandleLargeObject(Map map, HeapObject* object,
int object_size);
// Different cases for object evacuation.
template <typename THeapObjectSlot>
V8_INLINE SlotCallbackResult EvacuateObjectDefault(Map map,
HeapObjectSlot slot,
THeapObjectSlot slot,
HeapObject* object,
int object_size);
inline SlotCallbackResult EvacuateThinString(Map map, HeapObjectSlot slot,
template <typename THeapObjectSlot>
inline SlotCallbackResult EvacuateThinString(Map map, THeapObjectSlot slot,
ThinString object,
int object_size);
template <typename THeapObjectSlot>
inline SlotCallbackResult EvacuateShortcutCandidate(Map map,
HeapObjectSlot slot,
THeapObjectSlot slot,
ConsString object,
int object_size);
......
......@@ -574,12 +574,12 @@ Code Code::GetCodeFromTargetAddress(Address address) {
return Code::unchecked_cast(code);
}
Object* Code::GetObjectFromCodeEntry(Address code_entry) {
return HeapObject::FromAddress(code_entry - Code::kHeaderSize);
}
Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
return GetObjectFromCodeEntry(Memory<Address>(location_of_address));
Code Code::GetObjectFromEntryAddress(Address location_of_address) {
Address code_entry = Memory<Address>(location_of_address);
HeapObject* code = HeapObject::FromAddress(code_entry - Code::kHeaderSize);
// Unchecked cast because we can't rely on the map currently
// not being a forwarding pointer.
return Code::unchecked_cast(code);
}
bool Code::CanContainWeakObjects() {
......
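
GetObjectFromEntryAddress first reads the code entry pointer stored at location_of_address, then recovers the Code object itself: the entry (the first instruction) sits Code::kHeaderSize bytes past the start of the object, so the object address is the entry minus that header size. A tiny arithmetic sketch of the round trip, using a hypothetical header size and ignoring the heap-object tag:

#include <cassert>
#include <cstdint>

using Address = uintptr_t;

// Hypothetical value for illustration; the real Code::kHeaderSize comes from
// V8's object layout.
constexpr Address kHeaderSize = 0x40;

Address ObjectFromCodeEntry(Address code_entry) { return code_entry - kHeaderSize; }
Address CodeEntryFromObject(Address object) { return object + kHeaderSize; }

int main() {
  Address object = 0x10000;
  assert(ObjectFromCodeEntry(CodeEntryFromObject(object)) == object);
}
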
......@@ -215,10 +215,7 @@ class Code : public HeapObjectPtr {
static inline Code GetCodeFromTargetAddress(Address address);
// Convert an entry address into an object.
static inline Object* GetObjectFromEntryAddress(Address location_of_address);
// Convert a code entry into an object.
static inline Object* GetObjectFromCodeEntry(Address code_entry);
static inline Code GetObjectFromEntryAddress(Address location_of_address);
// Returns the address of the first instruction.
inline Address raw_instruction_start() const;
......
......@@ -126,7 +126,11 @@ HeapObjectReference HeapObjectReference::ClearedValue(Isolate* isolate) {
return HeapObjectReference(raw_value);
}
void HeapObjectReference::Update(HeapObjectSlot slot, HeapObject* value) {
template <typename THeapObjectSlot>
void HeapObjectReference::Update(THeapObjectSlot slot, HeapObject* value) {
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
Address old_value = (*slot).ptr();
DCHECK(!HAS_SMI_TAG(old_value));
Address new_value = value->ptr();
......
......@@ -158,7 +158,8 @@ class HeapObjectReference : public MaybeObject {
V8_INLINE static HeapObjectReference ClearedValue(Isolate* isolate);
V8_INLINE static void Update(HeapObjectSlot slot, HeapObject* value);
template <typename THeapObjectSlot>
V8_INLINE static void Update(THeapObjectSlot slot, HeapObject* value);
};
} // namespace internal
......