Commit 9ec7c67c authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Split MarkCompactCollector::PrepareRecordRelocSlot

Split the method into ShouldRecordRelocSlot and ProcessRelocInfo.
ProcessRelocInfo can then be reused in the write barrier and, in the
future, for the OLD_TO_SHARED remembered set. The logic of
SlotTypeForRelocInfoMode was folded into ProcessRelocInfo.

In addition, rename and document the SlotType enum values. This CL does
not change behavior.

Bug: v8:11708
Change-Id: Iff712e7e6f5d3a4da64510b67b604b9e04998361
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3400968
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78701}
parent a4dfe377
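The refactoring establishes a simple caller pattern: first ask ShouldRecordRelocSlot whether the slot is worth recording at all, then let ProcessRelocInfo compute the (memory chunk, slot type, offset) triple that gets inserted into the appropriate typed remembered set. A minimal, self-contained C++ sketch of that pattern follows; FakeChunk, RelocSlotInfo and the slot classification are illustrative stand-ins, not V8's real MemoryChunk/RecordRelocSlotInfo types.

// Simplified model of the ShouldRecordRelocSlot / ProcessRelocInfo split.
// All types here are illustrative stand-ins, not V8's actual classes.
#include <cstdint>
#include <iostream>
#include <map>
#include <vector>

enum class SlotType : uint8_t { kEmbeddedObjectFull, kCodeEntry, kCleared };

struct FakeChunk {
  uintptr_t base;                // start address of the "page"
  bool is_evacuation_candidate;  // target side: worth recording?
  bool skip_slot_recording;      // source side: recording suppressed?
};

struct RelocSlotInfo {
  FakeChunk* chunk;
  SlotType type;
  uint32_t offset;
};

// Step 1: a pure predicate -- callers can bail out early without doing
// any address arithmetic.
bool ShouldRecordRelocSlot(const FakeChunk& source, const FakeChunk& target) {
  return target.is_evacuation_candidate && !source.skip_slot_recording;
}

// Step 2: unconditionally classify the slot and compute its page offset.
// This part can be reused by other barriers / remembered sets.
RelocSlotInfo ProcessRelocInfo(FakeChunk& source, uintptr_t slot_address,
                               SlotType type) {
  return {&source, type, static_cast<uint32_t>(slot_address - source.base)};
}

int main() {
  FakeChunk source{0x1000, false, false};
  FakeChunk target{0x8000, true, false};
  // Stand-in for a per-chunk typed remembered set.
  std::map<FakeChunk*, std::vector<RelocSlotInfo>> typed_slots;

  uintptr_t slot_address = 0x1040;  // a slot inside the source chunk
  if (ShouldRecordRelocSlot(source, target)) {
    RelocSlotInfo info =
        ProcessRelocInfo(source, slot_address, SlotType::kCodeEntry);
    typed_slots[info.chunk].push_back(info);
  }
  std::cout << "recorded " << typed_slots[&source].size() << " slot(s)\n";
}

The point of the split is that a barrier can take the cheap early exit without paying for the classification and address arithmetic, while ProcessRelocInfo alone can later serve additional remembered sets such as OLD_TO_SHARED.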
@@ -318,15 +318,17 @@ class ConcurrentMarkingVisitor final
   }

   void RecordRelocSlot(Code host, RelocInfo* rinfo, HeapObject target) {
+    if (!MarkCompactCollector::ShouldRecordRelocSlot(host, rinfo, target))
+      return;
+
     MarkCompactCollector::RecordRelocSlotInfo info =
-        MarkCompactCollector::PrepareRecordRelocSlot(host, rinfo, target);
-    if (info.should_record) {
-      MemoryChunkData& data = (*memory_chunk_data_)[info.memory_chunk];
-      if (!data.typed_slots) {
-        data.typed_slots.reset(new TypedSlots());
-      }
-      data.typed_slots->Insert(info.slot_type, info.offset);
-    }
+        MarkCompactCollector::ProcessRelocInfo(host, rinfo, target);
+
+    MemoryChunkData& data = (*memory_chunk_data_)[info.memory_chunk];
+    if (!data.typed_slots) {
+      data.typed_slots.reset(new TypedSlots());
+    }
+    data.typed_slots->Insert(info.slot_type, info.offset);
   }

   void SynchronizePageAccess(HeapObject heap_object) {
@@ -4519,25 +4519,25 @@ class SlotVerifyingVisitor : public ObjectVisitorWithCageBases {
   void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
     Object target = Code::GetCodeFromTargetAddress(rinfo->target_address());
     if (ShouldHaveBeenRecorded(host, MaybeObject::FromObject(target))) {
-      CHECK(
-          InTypedSet(CODE_TARGET_SLOT, rinfo->pc()) ||
-          (rinfo->IsInConstantPool() &&
-           InTypedSet(CODE_ENTRY_SLOT, rinfo->constant_pool_entry_address())));
+      CHECK(InTypedSet(SlotType::kCodeEntry, rinfo->pc()) ||
+            (rinfo->IsInConstantPool() &&
+             InTypedSet(SlotType::kConstPoolCodeEntry,
+                        rinfo->constant_pool_entry_address())));
     }
   }

   void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
     Object target = rinfo->target_object(cage_base());
     if (ShouldHaveBeenRecorded(host, MaybeObject::FromObject(target))) {
-      CHECK(
-          InTypedSet(FULL_EMBEDDED_OBJECT_SLOT, rinfo->pc()) ||
-          InTypedSet(COMPRESSED_EMBEDDED_OBJECT_SLOT, rinfo->pc()) ||
-          InTypedSet(DATA_EMBEDDED_OBJECT_SLOT, rinfo->pc()) ||
-          (rinfo->IsInConstantPool() &&
-           InTypedSet(COMPRESSED_OBJECT_SLOT,
-                      rinfo->constant_pool_entry_address())) ||
-          (rinfo->IsInConstantPool() &&
-           InTypedSet(FULL_OBJECT_SLOT, rinfo->constant_pool_entry_address())));
+      CHECK(InTypedSet(SlotType::kEmbeddedObjectFull, rinfo->pc()) ||
+            InTypedSet(SlotType::kEmbeddedObjectCompressed, rinfo->pc()) ||
+            InTypedSet(SlotType::kEmbeddedObjectData, rinfo->pc()) ||
+            (rinfo->IsInConstantPool() &&
+             InTypedSet(SlotType::kConstPoolEmbeddedObjectCompressed,
+                        rinfo->constant_pool_entry_address())) ||
+            (rinfo->IsInConstantPool() &&
+             InTypedSet(SlotType::kConstPoolEmbeddedObjectFull,
+                        rinfo->constant_pool_entry_address())));
     }
   }
@@ -7317,25 +7317,11 @@ void Heap::WriteBarrierForRange(HeapObject object, TSlot start_slot,
 void Heap::GenerationalBarrierForCodeSlow(Code host, RelocInfo* rinfo,
                                           HeapObject object) {
   DCHECK(InYoungGeneration(object));
-  Page* source_page = Page::FromHeapObject(host);
-  RelocInfo::Mode rmode = rinfo->rmode();
-  Address addr = rinfo->pc();
-  SlotType slot_type = SlotTypeForRelocInfoMode(rmode);
-  if (rinfo->IsInConstantPool()) {
-    addr = rinfo->constant_pool_entry_address();
-    if (RelocInfo::IsCodeTargetMode(rmode)) {
-      slot_type = CODE_ENTRY_SLOT;
-    } else if (RelocInfo::IsCompressedEmbeddedObject(rmode)) {
-      slot_type = COMPRESSED_OBJECT_SLOT;
-    } else {
-      DCHECK(RelocInfo::IsFullEmbeddedObject(rmode));
-      slot_type = FULL_OBJECT_SLOT;
-    }
-  }
-  uintptr_t offset = addr - source_page->address();
-  DCHECK_LT(offset, static_cast<uintptr_t>(TypedSlotSet::kMaxOffset));
-  RememberedSet<OLD_TO_NEW>::InsertTyped(source_page, slot_type,
-                                         static_cast<uint32_t>(offset));
+  const MarkCompactCollector::RecordRelocSlotInfo info =
+      MarkCompactCollector::ProcessRelocInfo(host, rinfo, object);
+
+  RememberedSet<OLD_TO_NEW>::InsertTyped(info.memory_chunk, info.slot_type,
+                                         info.offset);
 }

 bool Heap::PageFlagsAreConsistent(HeapObject object) {
@@ -2947,54 +2947,76 @@ bool MarkCompactCollector::IsOnEvacuationCandidate(MaybeObject obj) {
   return Page::FromAddress(obj.ptr())->IsEvacuationCandidate();
 }

+// static
+bool MarkCompactCollector::ShouldRecordRelocSlot(Code host, RelocInfo* rinfo,
+                                                 HeapObject target) {
+  MemoryChunk* source_chunk = MemoryChunk::FromHeapObject(host);
+  BasicMemoryChunk* target_chunk = BasicMemoryChunk::FromHeapObject(target);
+  return target_chunk->IsEvacuationCandidate() &&
+         !source_chunk->ShouldSkipEvacuationSlotRecording();
+}
+
+// static
 MarkCompactCollector::RecordRelocSlotInfo
-MarkCompactCollector::PrepareRecordRelocSlot(Code host, RelocInfo* rinfo,
-                                             HeapObject target) {
+MarkCompactCollector::ProcessRelocInfo(Code host, RelocInfo* rinfo,
+                                       HeapObject target) {
   DCHECK_EQ(host, rinfo->host());
   RecordRelocSlotInfo result;
-  result.should_record = false;
-  Page* target_page = Page::FromHeapObject(target);
-  Page* source_page = Page::FromHeapObject(host);
-  if (target_page->IsEvacuationCandidate() &&
-      !source_page->ShouldSkipEvacuationSlotRecording()) {
-    RelocInfo::Mode rmode = rinfo->rmode();
-    Address addr = rinfo->pc();
-    SlotType slot_type = SlotTypeForRelocInfoMode(rmode);
-    if (rinfo->IsInConstantPool()) {
-      addr = rinfo->constant_pool_entry_address();
-      if (RelocInfo::IsCodeTargetMode(rmode)) {
-        slot_type = CODE_ENTRY_SLOT;
-      } else if (RelocInfo::IsCompressedEmbeddedObject(rmode)) {
-        slot_type = COMPRESSED_OBJECT_SLOT;
-      } else {
-        DCHECK(RelocInfo::IsFullEmbeddedObject(rmode));
-        slot_type = FULL_OBJECT_SLOT;
-      }
-    }
-    uintptr_t offset = addr - source_page->address();
-    DCHECK_LT(offset, static_cast<uintptr_t>(TypedSlotSet::kMaxOffset));
-    result.should_record = true;
-    result.memory_chunk = source_page;
-    result.slot_type = slot_type;
-    result.offset = static_cast<uint32_t>(offset);
+
+  const RelocInfo::Mode rmode = rinfo->rmode();
+  Address addr;
+  SlotType slot_type;
+
+  if (rinfo->IsInConstantPool()) {
+    addr = rinfo->constant_pool_entry_address();
+
+    if (RelocInfo::IsCodeTargetMode(rmode)) {
+      slot_type = SlotType::kConstPoolCodeEntry;
+    } else if (RelocInfo::IsCompressedEmbeddedObject(rmode)) {
+      slot_type = SlotType::kConstPoolEmbeddedObjectCompressed;
+    } else {
+      DCHECK(RelocInfo::IsFullEmbeddedObject(rmode));
+      slot_type = SlotType::kConstPoolEmbeddedObjectFull;
+    }
+  } else {
+    addr = rinfo->pc();
+
+    if (RelocInfo::IsCodeTargetMode(rmode)) {
+      slot_type = SlotType::kCodeEntry;
+    } else if (RelocInfo::IsFullEmbeddedObject(rmode)) {
+      slot_type = SlotType::kEmbeddedObjectFull;
+    } else if (RelocInfo::IsCompressedEmbeddedObject(rmode)) {
+      slot_type = SlotType::kEmbeddedObjectCompressed;
+    } else {
+      DCHECK(RelocInfo::IsDataEmbeddedObject(rmode));
+      slot_type = SlotType::kEmbeddedObjectData;
+    }
   }
+
+  MemoryChunk* const source_chunk = MemoryChunk::FromHeapObject(host);
+  const uintptr_t offset = addr - source_chunk->address();
+  DCHECK_LT(offset, static_cast<uintptr_t>(TypedSlotSet::kMaxOffset));
+  result.memory_chunk = source_chunk;
+  result.slot_type = slot_type;
+  result.offset = static_cast<uint32_t>(offset);
+
   return result;
 }

+// static
 void MarkCompactCollector::RecordRelocSlot(Code host, RelocInfo* rinfo,
                                            HeapObject target) {
-  RecordRelocSlotInfo info = PrepareRecordRelocSlot(host, rinfo, target);
-  if (info.should_record) {
-    // Access to TypeSlots need to be protected, since LocalHeaps might
-    // publish code in the background thread.
-    base::Optional<base::MutexGuard> opt_guard;
-    if (FLAG_concurrent_sparkplug) {
-      opt_guard.emplace(info.memory_chunk->mutex());
-    }
-    RememberedSet<OLD_TO_OLD>::InsertTyped(info.memory_chunk, info.slot_type,
-                                           info.offset);
-  }
+  if (!ShouldRecordRelocSlot(host, rinfo, target)) return;
+
+  RecordRelocSlotInfo info = ProcessRelocInfo(host, rinfo, target);
+
+  // Access to TypeSlots need to be protected, since LocalHeaps might
+  // publish code in the background thread.
+  base::Optional<base::MutexGuard> opt_guard;
+  if (FLAG_concurrent_sparkplug) {
+    opt_guard.emplace(info.memory_chunk->mutex());
+  }
+  RememberedSet<OLD_TO_OLD>::InsertTyped(info.memory_chunk, info.slot_type,
+                                         info.offset);
 }

 namespace {
@@ -508,11 +508,14 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   struct RecordRelocSlotInfo {
     MemoryChunk* memory_chunk;
     SlotType slot_type;
-    bool should_record;
     uint32_t offset;
   };
-  static RecordRelocSlotInfo PrepareRecordRelocSlot(Code host, RelocInfo* rinfo,
-                                                    HeapObject target);
+
+  static bool ShouldRecordRelocSlot(Code host, RelocInfo* rinfo,
+                                    HeapObject target);
+  static RecordRelocSlotInfo ProcessRelocInfo(Code host, RelocInfo* rinfo,
+                                              HeapObject target);
+
   static void RecordRelocSlot(Code host, RelocInfo* rinfo, HeapObject target);
   V8_INLINE static void RecordSlot(HeapObject object, ObjectSlot slot,
                                    HeapObject target);
@@ -121,15 +121,16 @@ void MarkingBarrier::Write(DescriptorArray descriptor_array,
 void MarkingBarrier::RecordRelocSlot(Code host, RelocInfo* rinfo,
                                      HeapObject target) {
   DCHECK(IsCurrentMarkingBarrier());
+  if (!MarkCompactCollector::ShouldRecordRelocSlot(host, rinfo, target)) return;
+
   MarkCompactCollector::RecordRelocSlotInfo info =
-      MarkCompactCollector::PrepareRecordRelocSlot(host, rinfo, target);
-  if (info.should_record) {
-    auto& typed_slots = typed_slots_map_[info.memory_chunk];
-    if (!typed_slots) {
-      typed_slots.reset(new TypedSlots());
-    }
-    typed_slots->Insert(info.slot_type, info.offset);
-  }
+      MarkCompactCollector::ProcessRelocInfo(host, rinfo, target);
+
+  auto& typed_slots = typed_slots_map_[info.memory_chunk];
+  if (!typed_slots) {
+    typed_slots.reset(new TypedSlots());
+  }
+  typed_slots->Insert(info.slot_type, info.offset);
 }

 // static
@@ -17,26 +17,26 @@ SlotCallbackResult UpdateTypedSlotHelper::UpdateTypedSlot(Heap* heap,
                                                           Address addr,
                                                           Callback callback) {
   switch (slot_type) {
-    case CODE_TARGET_SLOT: {
+    case SlotType::kCodeEntry: {
       RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, Code());
       return UpdateCodeTarget(&rinfo, callback);
     }
-    case CODE_ENTRY_SLOT: {
+    case SlotType::kConstPoolCodeEntry: {
       return UpdateCodeEntry(addr, callback);
     }
-    case COMPRESSED_EMBEDDED_OBJECT_SLOT: {
+    case SlotType::kEmbeddedObjectCompressed: {
       RelocInfo rinfo(addr, RelocInfo::COMPRESSED_EMBEDDED_OBJECT, 0, Code());
       return UpdateEmbeddedPointer(heap, &rinfo, callback);
     }
-    case FULL_EMBEDDED_OBJECT_SLOT: {
+    case SlotType::kEmbeddedObjectFull: {
       RelocInfo rinfo(addr, RelocInfo::FULL_EMBEDDED_OBJECT, 0, Code());
       return UpdateEmbeddedPointer(heap, &rinfo, callback);
     }
-    case DATA_EMBEDDED_OBJECT_SLOT: {
+    case SlotType::kEmbeddedObjectData: {
       RelocInfo rinfo(addr, RelocInfo::DATA_EMBEDDED_OBJECT, 0, Code());
       return UpdateEmbeddedPointer(heap, &rinfo, callback);
     }
-    case COMPRESSED_OBJECT_SLOT: {
+    case SlotType::kConstPoolEmbeddedObjectCompressed: {
       HeapObject old_target = HeapObject::cast(Object(
           DecompressTaggedAny(heap->isolate(), base::Memory<Tagged_t>(addr))));
       HeapObject new_target = old_target;
@@ -47,10 +47,10 @@ SlotCallbackResult UpdateTypedSlotHelper::UpdateTypedSlot(Heap* heap,
       }
       return result;
     }
-    case FULL_OBJECT_SLOT: {
+    case SlotType::kConstPoolEmbeddedObjectFull: {
       return callback(FullMaybeObjectSlot(addr));
     }
-    case CLEARED_SLOT:
+    case SlotType::kCleared:
       break;
   }
   UNREACHABLE();
@@ -391,19 +391,6 @@ class RememberedSetSweeping {
   }
 };

-inline SlotType SlotTypeForRelocInfoMode(RelocInfo::Mode rmode) {
-  if (RelocInfo::IsCodeTargetMode(rmode)) {
-    return CODE_TARGET_SLOT;
-  } else if (RelocInfo::IsFullEmbeddedObject(rmode)) {
-    return FULL_EMBEDDED_OBJECT_SLOT;
-  } else if (RelocInfo::IsCompressedEmbeddedObject(rmode)) {
-    return COMPRESSED_EMBEDDED_OBJECT_SLOT;
-  } else if (RelocInfo::IsDataEmbeddedObject(rmode)) {
-    return DATA_EMBEDDED_OBJECT_SLOT;
-  }
-  UNREACHABLE();
-}
-
 }  // namespace internal
 }  // namespace v8
@@ -64,7 +64,7 @@ void TypedSlotSet::ClearInvalidSlots(
   while (chunk != nullptr) {
     for (TypedSlot& slot : chunk->buffer) {
       SlotType type = TypeField::decode(slot.type_and_offset);
-      if (type == CLEARED_SLOT) continue;
+      if (type == SlotType::kCleared) continue;
       uint32_t offset = OffsetField::decode(slot.type_and_offset);
       std::map<uint32_t, uint32_t>::const_iterator upper_bound =
           invalid_ranges.upper_bound(offset);
@@ -602,15 +602,43 @@ class SlotSet {
 STATIC_ASSERT(std::is_standard_layout<SlotSet>::value);
 STATIC_ASSERT(std::is_standard_layout<SlotSet::Bucket>::value);

-enum SlotType {
-  FULL_EMBEDDED_OBJECT_SLOT,
-  COMPRESSED_EMBEDDED_OBJECT_SLOT,
-  DATA_EMBEDDED_OBJECT_SLOT,
-  FULL_OBJECT_SLOT,
-  COMPRESSED_OBJECT_SLOT,
-  CODE_TARGET_SLOT,
-  CODE_ENTRY_SLOT,
-  CLEARED_SLOT
+enum class SlotType : uint8_t {
+  // Full pointer sized slot storing an object start address.
+  // RelocInfo::target_object/RelocInfo::set_target_object methods are used for
+  // accessing. Used when pointer is stored in the instruction stream.
+  kEmbeddedObjectFull,
+
+  // Tagged sized slot storing an object start address.
+  // RelocInfo::target_object/RelocInfo::set_target_object methods are used for
+  // accessing. Used when pointer is stored in the instruction stream.
+  kEmbeddedObjectCompressed,
+
+  // Full pointer sized slot storing an object start address.
+  // RelocInfo::target_object/RelocInfo::set_target_object methods are used for
+  // accessing. Used when pointer is stored in the instruction stream.
+  kEmbeddedObjectData,
+
+  // Full pointer sized slot storing instruction start of Code object.
+  // RelocInfo::target_address/RelocInfo::set_target_address methods are used
+  // for accessing. Used when pointer is stored in the instruction stream.
+  kCodeEntry,
+
+  // Raw full pointer sized slot. Slot is accessed directly. Used when pointer
+  // is stored in constant pool.
+  kConstPoolEmbeddedObjectFull,
+
+  // Raw tagged sized slot. Slot is accessed directly. Used when pointer is
+  // stored in constant pool.
+  kConstPoolEmbeddedObjectCompressed,
+
+  // Raw full pointer sized slot storing instruction start of Code object. Slot
+  // is accessed directly. Used when pointer is stored in constant pool.
+  kConstPoolCodeEntry,
+
+  // Slot got cleared but has not been removed from the slot set.
+  kCleared,
+
+  kLast = kCleared
 };

 // Data structure for maintaining a list of typed slots in a page.
@@ -669,7 +697,7 @@ class V8_EXPORT_PRIVATE TypedSlotSet : public TypedSlots {
   // This can run concurrently to ClearInvalidSlots().
   template <typename Callback>
   int Iterate(Callback callback, IterationMode mode) {
-    STATIC_ASSERT(CLEARED_SLOT < 8);
+    STATIC_ASSERT(static_cast<uint8_t>(SlotType::kLast) < 8);
     Chunk* chunk = head_;
     Chunk* previous = nullptr;
     int new_count = 0;
@@ -677,7 +705,7 @@ class V8_EXPORT_PRIVATE TypedSlotSet : public TypedSlots {
       bool empty = true;
       for (TypedSlot& slot : chunk->buffer) {
         SlotType type = TypeField::decode(slot.type_and_offset);
-        if (type != CLEARED_SLOT) {
+        if (type != SlotType::kCleared) {
           uint32_t offset = OffsetField::decode(slot.type_and_offset);
           Address addr = page_start_ + offset;
           if (callback(type, addr) == KEEP_SLOT) {
@@ -727,7 +755,8 @@ class V8_EXPORT_PRIVATE TypedSlotSet : public TypedSlots {
     base::AsAtomicPointer::Relaxed_Store(&head_, chunk);
   }

   static TypedSlot ClearedTypedSlot() {
-    return TypedSlot{TypeField::encode(CLEARED_SLOT) | OffsetField::encode(0)};
+    return TypedSlot{TypeField::encode(SlotType::kCleared) |
+                     OffsetField::encode(0)};
   }

   Address page_start_;
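The STATIC_ASSERT(static_cast<uint8_t>(SlotType::kLast) < 8) above reflects that a TypedSlot packs the slot type together with the page offset into a single type_and_offset word, so the type must fit into the low three bits. Below is a rough standalone sketch of that kind of packing; the field widths (3 + 29 bits) are an assumption made for illustration, not a copy of V8's TypeField/OffsetField definitions.

// Illustrative sketch of packing a SlotType and a page offset into one
// 32-bit word, in the spirit of TypedSlot's TypeField/OffsetField.
// The 3-bit type field is an assumption derived from the kLast < 8 assert.
#include <cassert>
#include <cstdint>

enum class SlotType : uint8_t {
  kEmbeddedObjectFull,
  kCodeEntry,
  kCleared,
  kLast = kCleared
};

constexpr uint32_t kTypeBits = 3;                 // holds values 0..7
constexpr uint32_t kOffsetBits = 32 - kTypeBits;  // remaining 29 bits
constexpr uint32_t kMaxOffset = (1u << kOffsetBits) - 1;

constexpr uint32_t Encode(SlotType type, uint32_t offset) {
  return static_cast<uint32_t>(type) | (offset << kTypeBits);
}
constexpr SlotType DecodeType(uint32_t word) {
  return static_cast<SlotType>(word & ((1u << kTypeBits) - 1));
}
constexpr uint32_t DecodeOffset(uint32_t word) { return word >> kTypeBits; }

static_assert(static_cast<uint8_t>(SlotType::kLast) < (1u << kTypeBits),
              "SlotType must fit in the type field");

int main() {
  uint32_t word = Encode(SlotType::kCodeEntry, 0x1234);  // 0x1234 <= kMaxOffset
  assert(DecodeType(word) == SlotType::kCodeEntry);
  assert(DecodeOffset(word) == 0x1234);
}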
@@ -239,7 +239,8 @@ TEST(TypedSlotSet, Iterate) {
   static const int kDelta = 10000001;
   int added = 0;
   for (uint32_t i = 0; i < TypedSlotSet::kMaxOffset; i += kDelta) {
-    SlotType type = static_cast<SlotType>(i % CLEARED_SLOT);
+    SlotType type =
+        static_cast<SlotType>(i % static_cast<uint8_t>(SlotType::kCleared));
     set.Insert(type, i);
     ++added;
   }
@@ -247,7 +248,8 @@ TEST(TypedSlotSet, Iterate) {
   set.Iterate(
       [&iterated](SlotType type, Address addr) {
         uint32_t i = static_cast<uint32_t>(addr);
-        EXPECT_EQ(i % CLEARED_SLOT, static_cast<uint32_t>(type));
+        EXPECT_EQ(i % static_cast<uint8_t>(SlotType::kCleared),
+                  static_cast<uint32_t>(type));
         EXPECT_EQ(0u, i % kDelta);
         ++iterated;
         return i % 2 == 0 ? KEEP_SLOT : REMOVE_SLOT;
@@ -271,7 +273,8 @@ TEST(TypedSlotSet, ClearInvalidSlots) {
   const int kHostDelta = 100;
   uint32_t entries = 10;
   for (uint32_t i = 0; i < entries; i++) {
-    SlotType type = static_cast<SlotType>(i % CLEARED_SLOT);
+    SlotType type =
+        static_cast<SlotType>(i % static_cast<uint8_t>(SlotType::kCleared));
     set.Insert(type, i * kHostDelta);
   }
@@ -299,8 +302,8 @@ TEST(TypedSlotSet, Merge) {
   TypedSlotSet set0(0), set1(0);
   static const uint32_t kEntries = 10000;
   for (uint32_t i = 0; i < kEntries; i++) {
-    set0.Insert(FULL_EMBEDDED_OBJECT_SLOT, 2 * i);
-    set1.Insert(FULL_EMBEDDED_OBJECT_SLOT, 2 * i + 1);
+    set0.Insert(SlotType::kEmbeddedObjectFull, 2 * i);
+    set1.Insert(SlotType::kEmbeddedObjectFull, 2 * i + 1);
   }
   uint32_t count = 0;
   set0.Merge(&set1);