Commit 9ec7c67c authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Split MarkCompactCollector::PrepareRecordRelocSlot

Split method into ShouldRecordRelocSlot and ProcessRelocInfo.
ProcessRelocInfo can then be reused in the write barrier and in the
future for the OLD_TO_SHARED remembered set. SlotTypeForRelocInfoMode
got moved into ProcessRelocInfo.

In addition rename and document SlotTypes. This CL does not change
behavior.

Bug: v8:11708
Change-Id: Iff712e7e6f5d3a4da64510b67b604b9e04998361
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3400968
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78701}
parent a4dfe377
......@@ -318,16 +318,18 @@ class ConcurrentMarkingVisitor final
}
void RecordRelocSlot(Code host, RelocInfo* rinfo, HeapObject target) {
if (!MarkCompactCollector::ShouldRecordRelocSlot(host, rinfo, target))
return;
MarkCompactCollector::RecordRelocSlotInfo info =
MarkCompactCollector::PrepareRecordRelocSlot(host, rinfo, target);
if (info.should_record) {
MarkCompactCollector::ProcessRelocInfo(host, rinfo, target);
MemoryChunkData& data = (*memory_chunk_data_)[info.memory_chunk];
if (!data.typed_slots) {
data.typed_slots.reset(new TypedSlots());
}
data.typed_slots->Insert(info.slot_type, info.offset);
}
}
void SynchronizePageAccess(HeapObject heap_object) {
#ifdef THREAD_SANITIZER
......
......@@ -4519,25 +4519,25 @@ class SlotVerifyingVisitor : public ObjectVisitorWithCageBases {
void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
Object target = Code::GetCodeFromTargetAddress(rinfo->target_address());
if (ShouldHaveBeenRecorded(host, MaybeObject::FromObject(target))) {
CHECK(
InTypedSet(CODE_TARGET_SLOT, rinfo->pc()) ||
CHECK(InTypedSet(SlotType::kCodeEntry, rinfo->pc()) ||
(rinfo->IsInConstantPool() &&
InTypedSet(CODE_ENTRY_SLOT, rinfo->constant_pool_entry_address())));
InTypedSet(SlotType::kConstPoolCodeEntry,
rinfo->constant_pool_entry_address())));
}
}
void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
Object target = rinfo->target_object(cage_base());
if (ShouldHaveBeenRecorded(host, MaybeObject::FromObject(target))) {
CHECK(
InTypedSet(FULL_EMBEDDED_OBJECT_SLOT, rinfo->pc()) ||
InTypedSet(COMPRESSED_EMBEDDED_OBJECT_SLOT, rinfo->pc()) ||
InTypedSet(DATA_EMBEDDED_OBJECT_SLOT, rinfo->pc()) ||
CHECK(InTypedSet(SlotType::kEmbeddedObjectFull, rinfo->pc()) ||
InTypedSet(SlotType::kEmbeddedObjectCompressed, rinfo->pc()) ||
InTypedSet(SlotType::kEmbeddedObjectData, rinfo->pc()) ||
(rinfo->IsInConstantPool() &&
InTypedSet(COMPRESSED_OBJECT_SLOT,
InTypedSet(SlotType::kConstPoolEmbeddedObjectCompressed,
rinfo->constant_pool_entry_address())) ||
(rinfo->IsInConstantPool() &&
InTypedSet(FULL_OBJECT_SLOT, rinfo->constant_pool_entry_address())));
InTypedSet(SlotType::kConstPoolEmbeddedObjectFull,
rinfo->constant_pool_entry_address())));
}
}
......@@ -7317,25 +7317,11 @@ void Heap::WriteBarrierForRange(HeapObject object, TSlot start_slot,
void Heap::GenerationalBarrierForCodeSlow(Code host, RelocInfo* rinfo,
HeapObject object) {
DCHECK(InYoungGeneration(object));
Page* source_page = Page::FromHeapObject(host);
RelocInfo::Mode rmode = rinfo->rmode();
Address addr = rinfo->pc();
SlotType slot_type = SlotTypeForRelocInfoMode(rmode);
if (rinfo->IsInConstantPool()) {
addr = rinfo->constant_pool_entry_address();
if (RelocInfo::IsCodeTargetMode(rmode)) {
slot_type = CODE_ENTRY_SLOT;
} else if (RelocInfo::IsCompressedEmbeddedObject(rmode)) {
slot_type = COMPRESSED_OBJECT_SLOT;
} else {
DCHECK(RelocInfo::IsFullEmbeddedObject(rmode));
slot_type = FULL_OBJECT_SLOT;
}
}
uintptr_t offset = addr - source_page->address();
DCHECK_LT(offset, static_cast<uintptr_t>(TypedSlotSet::kMaxOffset));
RememberedSet<OLD_TO_NEW>::InsertTyped(source_page, slot_type,
static_cast<uint32_t>(offset));
const MarkCompactCollector::RecordRelocSlotInfo info =
MarkCompactCollector::ProcessRelocInfo(host, rinfo, object);
RememberedSet<OLD_TO_NEW>::InsertTyped(info.memory_chunk, info.slot_type,
info.offset);
}
bool Heap::PageFlagsAreConsistent(HeapObject object) {
......
......@@ -2947,45 +2947,68 @@ bool MarkCompactCollector::IsOnEvacuationCandidate(MaybeObject obj) {
return Page::FromAddress(obj.ptr())->IsEvacuationCandidate();
}
// static
// Returns true when a relocation slot in |host| that references |target|
// needs to be recorded in a remembered set: the target lives on a page that
// is an evacuation candidate, and the host's chunk has not opted out of
// evacuation slot recording. |rinfo| is not consulted here; it is processed
// by ProcessRelocInfo once recording is warranted.
bool MarkCompactCollector::ShouldRecordRelocSlot(Code host, RelocInfo* rinfo,
HeapObject target) {
MemoryChunk* source_chunk = MemoryChunk::FromHeapObject(host);
BasicMemoryChunk* target_chunk = BasicMemoryChunk::FromHeapObject(target);
return target_chunk->IsEvacuationCandidate() &&
!source_chunk->ShouldSkipEvacuationSlotRecording();
}
// static
MarkCompactCollector::RecordRelocSlotInfo
MarkCompactCollector::PrepareRecordRelocSlot(Code host, RelocInfo* rinfo,
MarkCompactCollector::ProcessRelocInfo(Code host, RelocInfo* rinfo,
HeapObject target) {
DCHECK_EQ(host, rinfo->host());
RecordRelocSlotInfo result;
result.should_record = false;
Page* target_page = Page::FromHeapObject(target);
Page* source_page = Page::FromHeapObject(host);
if (target_page->IsEvacuationCandidate() &&
!source_page->ShouldSkipEvacuationSlotRecording()) {
RelocInfo::Mode rmode = rinfo->rmode();
Address addr = rinfo->pc();
SlotType slot_type = SlotTypeForRelocInfoMode(rmode);
const RelocInfo::Mode rmode = rinfo->rmode();
Address addr;
SlotType slot_type;
if (rinfo->IsInConstantPool()) {
addr = rinfo->constant_pool_entry_address();
if (RelocInfo::IsCodeTargetMode(rmode)) {
slot_type = CODE_ENTRY_SLOT;
slot_type = SlotType::kConstPoolCodeEntry;
} else if (RelocInfo::IsCompressedEmbeddedObject(rmode)) {
slot_type = COMPRESSED_OBJECT_SLOT;
slot_type = SlotType::kConstPoolEmbeddedObjectCompressed;
} else {
DCHECK(RelocInfo::IsFullEmbeddedObject(rmode));
slot_type = FULL_OBJECT_SLOT;
slot_type = SlotType::kConstPoolEmbeddedObjectFull;
}
} else {
addr = rinfo->pc();
if (RelocInfo::IsCodeTargetMode(rmode)) {
slot_type = SlotType::kCodeEntry;
} else if (RelocInfo::IsFullEmbeddedObject(rmode)) {
slot_type = SlotType::kEmbeddedObjectFull;
} else if (RelocInfo::IsCompressedEmbeddedObject(rmode)) {
slot_type = SlotType::kEmbeddedObjectCompressed;
} else {
DCHECK(RelocInfo::IsDataEmbeddedObject(rmode));
slot_type = SlotType::kEmbeddedObjectData;
}
}
uintptr_t offset = addr - source_page->address();
MemoryChunk* const source_chunk = MemoryChunk::FromHeapObject(host);
const uintptr_t offset = addr - source_chunk->address();
DCHECK_LT(offset, static_cast<uintptr_t>(TypedSlotSet::kMaxOffset));
result.should_record = true;
result.memory_chunk = source_page;
result.memory_chunk = source_chunk;
result.slot_type = slot_type;
result.offset = static_cast<uint32_t>(offset);
}
return result;
}
// static
void MarkCompactCollector::RecordRelocSlot(Code host, RelocInfo* rinfo,
HeapObject target) {
RecordRelocSlotInfo info = PrepareRecordRelocSlot(host, rinfo, target);
if (info.should_record) {
if (!ShouldRecordRelocSlot(host, rinfo, target)) return;
RecordRelocSlotInfo info = ProcessRelocInfo(host, rinfo, target);
// Access to TypeSlots need to be protected, since LocalHeaps might
// publish code in the background thread.
base::Optional<base::MutexGuard> opt_guard;
......@@ -2994,7 +3017,6 @@ void MarkCompactCollector::RecordRelocSlot(Code host, RelocInfo* rinfo,
}
RememberedSet<OLD_TO_OLD>::InsertTyped(info.memory_chunk, info.slot_type,
info.offset);
}
}
namespace {
......
......@@ -508,11 +508,14 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
struct RecordRelocSlotInfo {
MemoryChunk* memory_chunk;
SlotType slot_type;
bool should_record;
uint32_t offset;
};
static RecordRelocSlotInfo PrepareRecordRelocSlot(Code host, RelocInfo* rinfo,
static bool ShouldRecordRelocSlot(Code host, RelocInfo* rinfo,
HeapObject target);
static RecordRelocSlotInfo ProcessRelocInfo(Code host, RelocInfo* rinfo,
HeapObject target);
static void RecordRelocSlot(Code host, RelocInfo* rinfo, HeapObject target);
V8_INLINE static void RecordSlot(HeapObject object, ObjectSlot slot,
HeapObject target);
......
......@@ -121,15 +121,16 @@ void MarkingBarrier::Write(DescriptorArray descriptor_array,
void MarkingBarrier::RecordRelocSlot(Code host, RelocInfo* rinfo,
HeapObject target) {
DCHECK(IsCurrentMarkingBarrier());
if (!MarkCompactCollector::ShouldRecordRelocSlot(host, rinfo, target)) return;
MarkCompactCollector::RecordRelocSlotInfo info =
MarkCompactCollector::PrepareRecordRelocSlot(host, rinfo, target);
if (info.should_record) {
MarkCompactCollector::ProcessRelocInfo(host, rinfo, target);
auto& typed_slots = typed_slots_map_[info.memory_chunk];
if (!typed_slots) {
typed_slots.reset(new TypedSlots());
}
typed_slots->Insert(info.slot_type, info.offset);
}
}
// static
......
......@@ -17,26 +17,26 @@ SlotCallbackResult UpdateTypedSlotHelper::UpdateTypedSlot(Heap* heap,
Address addr,
Callback callback) {
switch (slot_type) {
case CODE_TARGET_SLOT: {
case SlotType::kCodeEntry: {
RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, Code());
return UpdateCodeTarget(&rinfo, callback);
}
case CODE_ENTRY_SLOT: {
case SlotType::kConstPoolCodeEntry: {
return UpdateCodeEntry(addr, callback);
}
case COMPRESSED_EMBEDDED_OBJECT_SLOT: {
case SlotType::kEmbeddedObjectCompressed: {
RelocInfo rinfo(addr, RelocInfo::COMPRESSED_EMBEDDED_OBJECT, 0, Code());
return UpdateEmbeddedPointer(heap, &rinfo, callback);
}
case FULL_EMBEDDED_OBJECT_SLOT: {
case SlotType::kEmbeddedObjectFull: {
RelocInfo rinfo(addr, RelocInfo::FULL_EMBEDDED_OBJECT, 0, Code());
return UpdateEmbeddedPointer(heap, &rinfo, callback);
}
case DATA_EMBEDDED_OBJECT_SLOT: {
case SlotType::kEmbeddedObjectData: {
RelocInfo rinfo(addr, RelocInfo::DATA_EMBEDDED_OBJECT, 0, Code());
return UpdateEmbeddedPointer(heap, &rinfo, callback);
}
case COMPRESSED_OBJECT_SLOT: {
case SlotType::kConstPoolEmbeddedObjectCompressed: {
HeapObject old_target = HeapObject::cast(Object(
DecompressTaggedAny(heap->isolate(), base::Memory<Tagged_t>(addr))));
HeapObject new_target = old_target;
......@@ -47,10 +47,10 @@ SlotCallbackResult UpdateTypedSlotHelper::UpdateTypedSlot(Heap* heap,
}
return result;
}
case FULL_OBJECT_SLOT: {
case SlotType::kConstPoolEmbeddedObjectFull: {
return callback(FullMaybeObjectSlot(addr));
}
case CLEARED_SLOT:
case SlotType::kCleared:
break;
}
UNREACHABLE();
......
......@@ -391,19 +391,6 @@ class RememberedSetSweeping {
}
};
// Maps a RelocInfo mode to the SlotType used when inserting the slot into a
// typed slot set. Only the four instruction-stream modes shown below are
// valid inputs; any other mode aborts via UNREACHABLE(). (Constant-pool
// slots use different SlotType values and are handled by the caller.)
inline SlotType SlotTypeForRelocInfoMode(RelocInfo::Mode rmode) {
if (RelocInfo::IsCodeTargetMode(rmode)) {
return CODE_TARGET_SLOT;
} else if (RelocInfo::IsFullEmbeddedObject(rmode)) {
return FULL_EMBEDDED_OBJECT_SLOT;
} else if (RelocInfo::IsCompressedEmbeddedObject(rmode)) {
return COMPRESSED_EMBEDDED_OBJECT_SLOT;
} else if (RelocInfo::IsDataEmbeddedObject(rmode)) {
return DATA_EMBEDDED_OBJECT_SLOT;
}
UNREACHABLE();
}
} // namespace internal
} // namespace v8
......
......@@ -64,7 +64,7 @@ void TypedSlotSet::ClearInvalidSlots(
while (chunk != nullptr) {
for (TypedSlot& slot : chunk->buffer) {
SlotType type = TypeField::decode(slot.type_and_offset);
if (type == CLEARED_SLOT) continue;
if (type == SlotType::kCleared) continue;
uint32_t offset = OffsetField::decode(slot.type_and_offset);
std::map<uint32_t, uint32_t>::const_iterator upper_bound =
invalid_ranges.upper_bound(offset);
......
......@@ -602,15 +602,43 @@ class SlotSet {
STATIC_ASSERT(std::is_standard_layout<SlotSet>::value);
STATIC_ASSERT(std::is_standard_layout<SlotSet::Bucket>::value);
enum SlotType {
FULL_EMBEDDED_OBJECT_SLOT,
COMPRESSED_EMBEDDED_OBJECT_SLOT,
DATA_EMBEDDED_OBJECT_SLOT,
FULL_OBJECT_SLOT,
COMPRESSED_OBJECT_SLOT,
CODE_TARGET_SLOT,
CODE_ENTRY_SLOT,
CLEARED_SLOT
enum class SlotType : uint8_t {
// Full pointer sized slot storing an object start address.
// RelocInfo::target_object/RelocInfo::set_target_object methods are used for
// accessing. Used when pointer is stored in the instruction stream.
kEmbeddedObjectFull,
// Tagged sized slot storing an object start address.
// RelocInfo::target_object/RelocInfo::set_target_object methods are used for
// accessing. Used when pointer is stored in the instruction stream.
kEmbeddedObjectCompressed,
// Full pointer sized slot storing an object start address.
// RelocInfo::target_object/RelocInfo::set_target_object methods are used for
// accessing. Used when pointer is stored in the instruction stream.
kEmbeddedObjectData,
// Full pointer sized slot storing instruction start of Code object.
// RelocInfo::target_address/RelocInfo::set_target_address methods are used
// for accessing. Used when pointer is stored in the instruction stream.
kCodeEntry,
// Raw full pointer sized slot. Slot is accessed directly. Used when pointer
// is stored in constant pool.
kConstPoolEmbeddedObjectFull,
// Raw tagged sized slot. Slot is accessed directly. Used when pointer is
// stored in constant pool.
kConstPoolEmbeddedObjectCompressed,
// Raw full pointer sized slot storing instruction start of Code object. Slot
// is accessed directly. Used when pointer is stored in constant pool.
kConstPoolCodeEntry,
// Slot got cleared but has not been removed from the slot set.
kCleared,
kLast = kCleared
};
// Data structure for maintaining a list of typed slots in a page.
......@@ -669,7 +697,7 @@ class V8_EXPORT_PRIVATE TypedSlotSet : public TypedSlots {
// This can run concurrently to ClearInvalidSlots().
template <typename Callback>
int Iterate(Callback callback, IterationMode mode) {
STATIC_ASSERT(CLEARED_SLOT < 8);
STATIC_ASSERT(static_cast<uint8_t>(SlotType::kLast) < 8);
Chunk* chunk = head_;
Chunk* previous = nullptr;
int new_count = 0;
......@@ -677,7 +705,7 @@ class V8_EXPORT_PRIVATE TypedSlotSet : public TypedSlots {
bool empty = true;
for (TypedSlot& slot : chunk->buffer) {
SlotType type = TypeField::decode(slot.type_and_offset);
if (type != CLEARED_SLOT) {
if (type != SlotType::kCleared) {
uint32_t offset = OffsetField::decode(slot.type_and_offset);
Address addr = page_start_ + offset;
if (callback(type, addr) == KEEP_SLOT) {
......@@ -727,7 +755,8 @@ class V8_EXPORT_PRIVATE TypedSlotSet : public TypedSlots {
base::AsAtomicPointer::Relaxed_Store(&head_, chunk);
}
static TypedSlot ClearedTypedSlot() {
return TypedSlot{TypeField::encode(CLEARED_SLOT) | OffsetField::encode(0)};
return TypedSlot{TypeField::encode(SlotType::kCleared) |
OffsetField::encode(0)};
}
Address page_start_;
......
......@@ -239,7 +239,8 @@ TEST(TypedSlotSet, Iterate) {
static const int kDelta = 10000001;
int added = 0;
for (uint32_t i = 0; i < TypedSlotSet::kMaxOffset; i += kDelta) {
SlotType type = static_cast<SlotType>(i % CLEARED_SLOT);
SlotType type =
static_cast<SlotType>(i % static_cast<uint8_t>(SlotType::kCleared));
set.Insert(type, i);
++added;
}
......@@ -247,7 +248,8 @@ TEST(TypedSlotSet, Iterate) {
set.Iterate(
[&iterated](SlotType type, Address addr) {
uint32_t i = static_cast<uint32_t>(addr);
EXPECT_EQ(i % CLEARED_SLOT, static_cast<uint32_t>(type));
EXPECT_EQ(i % static_cast<uint8_t>(SlotType::kCleared),
static_cast<uint32_t>(type));
EXPECT_EQ(0u, i % kDelta);
++iterated;
return i % 2 == 0 ? KEEP_SLOT : REMOVE_SLOT;
......@@ -271,7 +273,8 @@ TEST(TypedSlotSet, ClearInvalidSlots) {
const int kHostDelta = 100;
uint32_t entries = 10;
for (uint32_t i = 0; i < entries; i++) {
SlotType type = static_cast<SlotType>(i % CLEARED_SLOT);
SlotType type =
static_cast<SlotType>(i % static_cast<uint8_t>(SlotType::kCleared));
set.Insert(type, i * kHostDelta);
}
......@@ -299,8 +302,8 @@ TEST(TypedSlotSet, Merge) {
TypedSlotSet set0(0), set1(0);
static const uint32_t kEntries = 10000;
for (uint32_t i = 0; i < kEntries; i++) {
set0.Insert(FULL_EMBEDDED_OBJECT_SLOT, 2 * i);
set1.Insert(FULL_EMBEDDED_OBJECT_SLOT, 2 * i + 1);
set0.Insert(SlotType::kEmbeddedObjectFull, 2 * i);
set1.Insert(SlotType::kEmbeddedObjectFull, 2 * i + 1);
}
uint32_t count = 0;
set0.Merge(&set1);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment