Commit 6925bc09 authored by Leon Bettscheider, committed by V8 LUCI CQ

[heap] Refactor methods and nested class of ConcurrentMarkingVisitor

This CL moves a few methods and the nested class SlotSnapshottingVisitor
from ConcurrentMarkingVisitor to ConcurrentMarkingVisitorUtility.

Methods in ConcurrentMarkingVisitorUtility are now static and take the
concrete visitor as an explicit Visitor parameter instead.

This is preparatory work for adding a
YoungGenerationConcurrentMarkingVisitor class, which will be able to
reuse members of ConcurrentMarkingVisitorUtility.
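
For illustration, here is a minimal, self-contained sketch of the pattern this
CL introduces: shared marking logic lives in static methods templated on the
concrete visitor type, so a second visitor class can reuse it without sharing
state or a helper base class. All names below (MarkingUtility, FullVisitor,
YoungGenVisitor) are hypothetical stand-ins, not the real V8 classes.

#include <iostream>

class MarkingUtility {
 public:
  // Shared logic: static and templated on the visitor, no implicit this.
  template <typename Visitor, typename T>
  static int VisitObject(Visitor* visitor, T object) {
    if (!visitor->ShouldVisit(object)) return 0;  // visitor-specific policy
    visitor->MarkObject(object);                  // visitor-specific marking
    return static_cast<int>(sizeof(T));
  }
};

class FullVisitor {
 public:
  bool ShouldVisit(int) const { return true; }
  void MarkObject(int o) const { std::cout << "full: " << o << "\n"; }
  int VisitSmi(int object) { return MarkingUtility::VisitObject(this, object); }
};

class YoungGenVisitor {
 public:
  bool ShouldVisit(int o) const { return o % 2 == 0; }  // different policy
  void MarkObject(int o) const { std::cout << "young: " << o << "\n"; }
  int VisitSmi(int object) { return MarkingUtility::VisitObject(this, object); }
};

int main() {
  FullVisitor full;
  YoungGenVisitor young;
  full.VisitSmi(1);   // prints "full: 1"
  young.VisitSmi(2);  // prints "young: 2"
  return 0;
}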

Bug: v8:13012
Change-Id: I503c20e655578031018a2e37dd92c1d61bbe1686
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3779677
Commit-Queue: Leon Bettscheider <bettscheider@google.com>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81931}
parent e93a09c2
@@ -82,6 +82,128 @@ class SlotSnapshot {
std::pair<ObjectSlot, Object> snapshot_[kMaxSnapshotSize];
};
class ConcurrentMarkingVisitorUtility {
public:
template <typename Visitor, typename T,
typename TBodyDescriptor = typename T::BodyDescriptor>
static int VisitJSObjectSubclass(Visitor* visitor, Map map, T object) {
if (!visitor->ShouldVisit(object)) return 0;
int size = TBodyDescriptor::SizeOf(map, object);
int used_size = map.UsedInstanceSize();
DCHECK_LE(used_size, size);
DCHECK_GE(used_size, JSObject::GetHeaderSize(map));
visitor->VisitMapPointer(object);
// It is important to visit only the used fields and ignore the slack fields
// because the slack fields may be trimmed concurrently.
TBodyDescriptor::IterateBody(map, object, used_size, visitor);
return size;
}
template <typename Visitor, typename T>
static int VisitJSObjectSubclassFast(Visitor* visitor, Map map, T object) {
using TBodyDescriptor = typename T::FastBodyDescriptor;
return VisitJSObjectSubclass<Visitor, T, TBodyDescriptor>(visitor, map,
object);
}
template <typename Visitor>
static void VisitPointersInSnapshot(Visitor* visitor, HeapObject host,
const SlotSnapshot& snapshot) {
for (int i = 0; i < snapshot.number_of_slots(); i++) {
ObjectSlot slot = snapshot.slot(i);
Object object = snapshot.value(i);
DCHECK(!HasWeakHeapObjectTag(object));
if (!object.IsHeapObject()) continue;
HeapObject heap_object = HeapObject::cast(object);
visitor->SynchronizePageAccess(heap_object);
BasicMemoryChunk* target_page =
BasicMemoryChunk::FromHeapObject(heap_object);
if (!visitor->is_shared_heap() && target_page->InSharedHeap()) continue;
visitor->MarkObject(host, heap_object);
visitor->RecordSlot(host, slot, heap_object);
}
}
template <typename Visitor, typename T>
static int VisitFullyWithSnapshot(Visitor* visitor, Map map, T object) {
using TBodyDescriptor = typename T::BodyDescriptor;
int size = TBodyDescriptor::SizeOf(map, object);
const SlotSnapshot& snapshot =
MakeSlotSnapshot<Visitor, T, TBodyDescriptor>(visitor, map, object,
size);
if (!visitor->ShouldVisit(object)) return 0;
ConcurrentMarkingVisitorUtility::VisitPointersInSnapshot(visitor, object,
snapshot);
return size;
}
template <typename Visitor, typename T, typename TBodyDescriptor>
static const SlotSnapshot& MakeSlotSnapshot(Visitor* visitor, Map map,
T object, int size) {
SlotSnapshottingVisitor slot_snapshotting_visitor(visitor->slot_snapshot(),
visitor->cage_base(),
visitor->code_cage_base());
slot_snapshotting_visitor.VisitPointer(object, object.map_slot());
TBodyDescriptor::IterateBody(map, object, size, &slot_snapshotting_visitor);
return *(visitor->slot_snapshot());
}
// Helper class for collecting in-object slot addresses and values.
class SlotSnapshottingVisitor final : public ObjectVisitorWithCageBases {
public:
explicit SlotSnapshottingVisitor(SlotSnapshot* slot_snapshot,
PtrComprCageBase cage_base,
PtrComprCageBase code_cage_base)
: ObjectVisitorWithCageBases(cage_base, code_cage_base),
slot_snapshot_(slot_snapshot) {
slot_snapshot_->clear();
}
void VisitPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) override {
for (ObjectSlot p = start; p < end; ++p) {
Object object = p.Relaxed_Load(cage_base());
slot_snapshot_->add(p, object);
}
}
void VisitCodePointer(HeapObject host, CodeObjectSlot slot) override {
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
Object code = slot.Relaxed_Load(code_cage_base());
slot_snapshot_->add(ObjectSlot(slot.address()), code);
}
void VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) override {
// This should never happen, because we don't use snapshotting for objects
// which contain weak references.
UNREACHABLE();
}
void VisitCodeTarget(Code host, RelocInfo* rinfo) final {
// This should never happen, because snapshotting is performed only on
// some String subclasses.
UNREACHABLE();
}
void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final {
// This should never happen, because snapshotting is performed only on
// some String subclasses.
UNREACHABLE();
}
void VisitCustomWeakPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) override {
// This should never happen, because snapshotting is performed only on
// some String subclasses.
UNREACHABLE();
}
private:
SlotSnapshot* slot_snapshot_;
};
};
class ConcurrentMarkingVisitor final
: public MarkingVisitorBase<ConcurrentMarkingVisitor,
ConcurrentMarkingState> {
@@ -111,44 +233,54 @@ class ConcurrentMarkingVisitor final
bool AllowDefaultJSObjectVisit() { return false; }
int VisitJSObject(Map map, JSObject object) {
return VisitJSObjectSubclass(map, object);
return ConcurrentMarkingVisitorUtility::VisitJSObjectSubclass(this, map,
object);
}
int VisitJSObjectFast(Map map, JSObject object) {
return VisitJSObjectSubclassFast(map, object);
return ConcurrentMarkingVisitorUtility::VisitJSObjectSubclassFast(this, map,
object);
}
int VisitJSExternalObject(Map map, JSExternalObject object) {
return VisitJSObjectSubclass(map, object);
return ConcurrentMarkingVisitorUtility::VisitJSObjectSubclass(this, map,
object);
}
#if V8_ENABLE_WEBASSEMBLY
int VisitWasmInstanceObject(Map map, WasmInstanceObject object) {
return VisitJSObjectSubclass(map, object);
return ConcurrentMarkingVisitorUtility::VisitJSObjectSubclass(this, map,
object);
}
int VisitWasmSuspenderObject(Map map, WasmSuspenderObject object) {
return VisitJSObjectSubclass(map, object);
return ConcurrentMarkingVisitorUtility::VisitJSObjectSubclass(this, map,
object);
}
#endif // V8_ENABLE_WEBASSEMBLY
int VisitJSWeakCollection(Map map, JSWeakCollection object) {
return VisitJSObjectSubclass(map, object);
return ConcurrentMarkingVisitorUtility::VisitJSObjectSubclass(this, map,
object);
}
int VisitJSFinalizationRegistry(Map map, JSFinalizationRegistry object) {
return VisitJSObjectSubclass(map, object);
return ConcurrentMarkingVisitorUtility::VisitJSObjectSubclass(this, map,
object);
}
int VisitConsString(Map map, ConsString object) {
return VisitFullyWithSnapshot(map, object);
return ConcurrentMarkingVisitorUtility::VisitFullyWithSnapshot(this, map,
object);
}
int VisitSlicedString(Map map, SlicedString object) {
return VisitFullyWithSnapshot(map, object);
return ConcurrentMarkingVisitorUtility::VisitFullyWithSnapshot(this, map,
object);
}
int VisitThinString(Map map, ThinString object) {
return VisitFullyWithSnapshot(map, object);
return ConcurrentMarkingVisitorUtility::VisitFullyWithSnapshot(this, map,
object);
}
int VisitSeqOneByteString(Map map, SeqOneByteString object) {
@@ -187,80 +319,18 @@ class ConcurrentMarkingVisitor final
return marking_state_.GreyToBlackUnaccounted(object);
}
private:
// Helper class for collecting in-object slot addresses and values.
class SlotSnapshottingVisitor final : public ObjectVisitorWithCageBases {
public:
explicit SlotSnapshottingVisitor(SlotSnapshot* slot_snapshot,
PtrComprCageBase cage_base,
PtrComprCageBase code_cage_base)
: ObjectVisitorWithCageBases(cage_base, code_cage_base),
slot_snapshot_(slot_snapshot) {
slot_snapshot_->clear();
}
void VisitPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) override {
for (ObjectSlot p = start; p < end; ++p) {
Object object = p.Relaxed_Load(cage_base());
slot_snapshot_->add(p, object);
}
}
void VisitCodePointer(HeapObject host, CodeObjectSlot slot) override {
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
Object code = slot.Relaxed_Load(code_cage_base());
slot_snapshot_->add(ObjectSlot(slot.address()), code);
}
void VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) override {
// This should never happen, because we don't use snapshotting for objects
// which contain weak references.
UNREACHABLE();
}
void VisitCodeTarget(Code host, RelocInfo* rinfo) final {
// This should never happen, because snapshotting is performed only on
// some String subclasses.
UNREACHABLE();
}
void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final {
// This should never happen, because snapshotting is performed only on
// some String subclasses.
UNREACHABLE();
}
void VisitCustomWeakPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) override {
// This should never happen, because snapshotting is performed only on
// some String subclasses.
UNREACHABLE();
}
private:
SlotSnapshot* slot_snapshot_;
};
template <typename T>
int VisitJSObjectSubclassFast(Map map, T object) {
using TBodyDescriptor = typename T::FastBodyDescriptor;
return VisitJSObjectSubclass<T, TBodyDescriptor>(map, object);
template <typename TSlot>
void RecordSlot(HeapObject object, TSlot slot, HeapObject target) {
MarkCompactCollector::RecordSlot(object, slot, target);
}
SlotSnapshot* slot_snapshot() { return &slot_snapshot_; }
private:
template <typename T, typename TBodyDescriptor = typename T::BodyDescriptor>
int VisitJSObjectSubclass(Map map, T object) {
if (!ShouldVisit(object)) return 0;
int size = TBodyDescriptor::SizeOf(map, object);
int used_size = map.UsedInstanceSize();
DCHECK_LE(used_size, size);
DCHECK_GE(used_size, JSObject::GetHeaderSize(map));
this->VisitMapPointer(object);
// It is important to visit only the used fields and ignore the slack fields
// because the slack fields may be trimmed concurrently.
TBodyDescriptor::IterateBody(map, object, used_size, this);
return size;
return ConcurrentMarkingVisitorUtility::VisitJSObjectSubclass<
ConcurrentMarkingVisitor, T, TBodyDescriptor>(this, map, object);
}
template <typename T>
@@ -282,47 +352,6 @@ class ConcurrentMarkingVisitor final
return size;
}
void VisitPointersInSnapshot(HeapObject host, const SlotSnapshot& snapshot) {
for (int i = 0; i < snapshot.number_of_slots(); i++) {
ObjectSlot slot = snapshot.slot(i);
Object object = snapshot.value(i);
DCHECK(!HasWeakHeapObjectTag(object));
if (!object.IsHeapObject()) continue;
HeapObject heap_object = HeapObject::cast(object);
SynchronizePageAccess(heap_object);
BasicMemoryChunk* target_page =
BasicMemoryChunk::FromHeapObject(heap_object);
if (!is_shared_heap_ && target_page->InSharedHeap()) continue;
MarkObject(host, heap_object);
RecordSlot(host, slot, heap_object);
}
}
template <typename T>
int VisitFullyWithSnapshot(Map map, T object) {
using TBodyDescriptor = typename T::BodyDescriptor;
int size = TBodyDescriptor::SizeOf(map, object);
const SlotSnapshot& snapshot =
MakeSlotSnapshot<T, TBodyDescriptor>(map, object, size);
if (!ShouldVisit(object)) return 0;
VisitPointersInSnapshot(object, snapshot);
return size;
}
template <typename T, typename TBodyDescriptor>
const SlotSnapshot& MakeSlotSnapshot(Map map, T object, int size) {
SlotSnapshottingVisitor visitor(&slot_snapshot_, cage_base(),
code_cage_base());
visitor.VisitPointer(object, object.map_slot());
TBodyDescriptor::IterateBody(map, object, size, &visitor);
return slot_snapshot_;
}
template <typename TSlot>
void RecordSlot(HeapObject object, TSlot slot, HeapObject target) {
MarkCompactCollector::RecordSlot(object, slot, target);
}
void RecordRelocSlot(Code host, RelocInfo* rinfo, HeapObject target) {
if (!MarkCompactCollector::ShouldRecordRelocSlot(host, rinfo, target))
return;
......
@@ -220,6 +220,11 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
#endif
}
bool is_shared_heap() { return is_shared_heap_; }
// Marks the object grey and pushes it on the marking work list.
V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
protected:
ConcreteVisitor* concrete_visitor() {
return static_cast<ConcreteVisitor*>(this);
@@ -259,8 +264,6 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
// list and visits its header. Returns the size of the descriptor array
// if it was successfully marked as black.
V8_INLINE int MarkDescriptorArrayBlack(DescriptorArray descriptors);
// Marks the object grey and pushes it on the marking work list.
V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
V8_INLINE void AddStrongReferenceForReferenceSummarizer(HeapObject host,
HeapObject obj) {
......