Commit 69b1e0ec authored by Igor Sheludko, committed by V8 LUCI CQ

[ext-code-space][heap] Implement custom marking of CodeObjectSlots

... which will update both the CodeObjectSlot contents and the cached
value of the code entry point when the pointed-to Code object is
evacuated.
This is done by introducing an OLD_TO_CODE remembered set, which is
populated with the recorded slots containing pointers to Code objects.
CodeDataContainer is the only kind of holder that can contain Code
pointers, so having a CodeObjectSlot is enough to compute the holding
CodeDataContainer object and update the cached code entry point there.

This CL fixes the data race in the previous implementation, which
updated the code entry point during Code object migration.
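For illustration only, a hypothetical sketch of the evacuation-phase half:
updating a recorded OLD_TO_CODE slot and the holder's cached entry point on
the pointer-updating path rather than during Code migration. The helper name,
the use of CodeDataContainer::kCodeOffset, set_code_entry_point() and
Relaxed_Store() are assumptions here; the actual implementation lives in the
collapsed mark-compact diff below.

  // Hypothetical sketch, not the actual implementation.
  void UpdateRecordedCodeSlotSketch(Isolate* isolate, CodeObjectSlot slot,
                                    PtrComprCageBase code_cage_base) {
    Object old_value = slot.Relaxed_Load(code_cage_base);
    HeapObject heap_object;
    if (!old_value.GetHeapObject(&heap_object)) return;
    MapWord map_word = heap_object.map_word(kRelaxedLoad);
    if (!map_word.IsForwardingAddress()) return;  // Code object did not move.
    Code new_code = Code::cast(map_word.ToForwardingAddress());
    // 1. Repoint the slot at the relocated Code object.
    slot.Relaxed_Store(new_code);
    // 2. CodeDataContainer is the only holder of such slots, so the holder
    //    can be recovered from the slot address (assumed layout constant)
    //    and its cached code entry point refreshed in place.
    CodeDataContainer holder = CodeDataContainer::cast(HeapObject::FromAddress(
        slot.address() - CodeDataContainer::kCodeOffset));
    holder.set_code_entry_point(isolate, new_code.InstructionStart());
  }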

Bug: v8:11880
Change-Id: I44aa46af4bad7eb4eaa922b6876d5f2f836e0791
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3035084
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#75826}
parent cdbd6695
@@ -177,8 +177,9 @@ class ConcurrentMarkingVisitor final
void VisitPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) override {
PtrComprCageBase cage_base = GetPtrComprCageBase(host);
for (ObjectSlot p = start; p < end; ++p) {
Object object = p.Relaxed_Load();
Object object = p.Relaxed_Load(cage_base);
slot_snapshot_->add(p, object);
}
}
@@ -186,7 +187,9 @@ class ConcurrentMarkingVisitor final
void VisitCodePointer(HeapObject host, CodeObjectSlot slot) override {
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
// TODO(v8:11880): support external code space.
VisitPointers(host, ObjectSlot(slot), ObjectSlot(slot + 1));
PtrComprCageBase code_cage_base = GetPtrComprCageBase(host);
Object code = slot.Relaxed_Load(code_cage_base);
slot_snapshot_->add(slot, code);
}
void VisitPointers(HeapObject host, MaybeObjectSlot start,
@@ -3830,7 +3830,9 @@ class SlotCollectingVisitor final : public ObjectVisitor {
void VisitCodePointer(HeapObject host, CodeObjectSlot slot) override {
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
slots_.push_back(MaybeObjectSlot(slot));
#if V8_EXTERNAL_CODE_SPACE
code_slots_.push_back(slot);
#endif
}
void VisitCodeTarget(Code host, RelocInfo* rinfo) final { UNREACHABLE(); }
@@ -3844,9 +3846,16 @@ class SlotCollectingVisitor final : public ObjectVisitor {
int number_of_slots() { return static_cast<int>(slots_.size()); }
MaybeObjectSlot slot(int i) { return slots_[i]; }
#if V8_EXTERNAL_CODE_SPACE
ObjectSlot code_slot(int i) { return code_slots_[i]; }
int number_of_code_slots() { return static_cast<int>(code_slots_.size()); }
#endif
private:
std::vector<MaybeObjectSlot> slots_;
#if V8_EXTERNAL_CODE_SPACE
std::vector<ObjectSlot> code_slots_;
#endif
};
void Heap::VerifyObjectLayoutChange(HeapObject object, Map new_map) {
@@ -3883,6 +3892,13 @@ void Heap::VerifyObjectLayoutChange(HeapObject object, Map new_map) {
for (int i = 0; i < new_visitor.number_of_slots(); i++) {
DCHECK_EQ(new_visitor.slot(i), old_visitor.slot(i));
}
#if V8_EXTERNAL_CODE_SPACE
DCHECK_EQ(new_visitor.number_of_code_slots(),
old_visitor.number_of_code_slots());
for (int i = 0; i < new_visitor.number_of_code_slots(); i++) {
DCHECK_EQ(new_visitor.code_slot(i), old_visitor.code_slot(i));
}
#endif // V8_EXTERNAL_CODE_SPACE
} else {
DCHECK_EQ(pending_layout_change_object_, object);
pending_layout_change_object_ = HeapObject();
@@ -4453,7 +4469,12 @@ class SlotVerifyingVisitor : public ObjectVisitor {
void VisitCodePointer(HeapObject host, CodeObjectSlot slot) override {
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
// TODO(v8:11880): support external code space.
VisitPointers(host, MaybeObjectSlot(slot), MaybeObjectSlot(slot + 1));
PtrComprCageBase code_cage_base =
GetPtrComprCageBaseFromOnHeapAddress(slot.address());
if (ShouldHaveBeenRecorded(
host, MaybeObject::FromObject(slot.load(code_cage_base)))) {
CHECK_GT(untyped_->count(slot.address()), 0);
}
}
void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
@@ -6329,7 +6350,9 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
void VisitCodePointer(HeapObject host, CodeObjectSlot slot) override {
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
// TODO(v8:11880): support external code space.
VisitPointers(host, MaybeObjectSlot(slot), MaybeObjectSlot(slot + 1));
PtrComprCageBase code_cage_base = GetPtrComprCageBase(host);
HeapObject code = HeapObject::unchecked_cast(slot.load(code_cage_base));
MarkHeapObject(code);
}
void VisitCodeTarget(Code host, RelocInfo* rinfo) final {
@@ -67,12 +67,9 @@ void MarkCompactCollector::RecordSlot(HeapObject object, ObjectSlot slot,
void MarkCompactCollector::RecordSlot(HeapObject object, HeapObjectSlot slot,
HeapObject target) {
BasicMemoryChunk* target_page = BasicMemoryChunk::FromHeapObject(target);
MemoryChunk* source_page = MemoryChunk::FromHeapObject(object);
if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>() &&
!source_page->ShouldSkipEvacuationSlotRecording<AccessMode::ATOMIC>()) {
RememberedSet<OLD_TO_OLD>::Insert<AccessMode::ATOMIC>(source_page,
slot.address());
if (!source_page->ShouldSkipEvacuationSlotRecording<AccessMode::ATOMIC>()) {
RecordSlot(source_page, slot, target);
}
}
@@ -80,8 +77,14 @@ void MarkCompactCollector::RecordSlot(MemoryChunk* source_page,
HeapObjectSlot slot, HeapObject target) {
BasicMemoryChunk* target_page = BasicMemoryChunk::FromHeapObject(target);
if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>()) {
RememberedSet<OLD_TO_OLD>::Insert<AccessMode::ATOMIC>(source_page,
slot.address());
if (V8_EXTERNAL_CODE_SPACE_BOOL &&
target_page->IsFlagSet(MemoryChunk::IS_EXECUTABLE)) {
RememberedSet<OLD_TO_CODE>::Insert<AccessMode::ATOMIC>(source_page,
slot.address());
} else {
RememberedSet<OLD_TO_OLD>::Insert<AccessMode::ATOMIC>(source_page,
slot.address());
}
}
}
This diff is collapsed.
@@ -88,6 +88,23 @@ MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitPointersImpl(
}
}
template <typename ConcreteVisitor, typename MarkingState>
V8_INLINE void
MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitCodePointerImpl(
HeapObject host, CodeObjectSlot slot) {
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
// TODO(v8:11880): support external code space.
PtrComprCageBase code_cage_base = GetPtrComprCageBase(host);
Object object = slot.Relaxed_Load(code_cage_base);
HeapObject heap_object;
if (object.GetHeapObjectIfStrong(&heap_object)) {
// If the reference changes concurrently from strong to weak, the write
// barrier will treat the weak reference as strong, so we won't miss the
// weak reference.
ProcessStrongHeapObject(host, HeapObjectSlot(slot), heap_object);
}
}
template <typename ConcreteVisitor, typename MarkingState>
void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEmbeddedPointer(
Code host, RelocInfo* rinfo) {
@@ -154,9 +154,7 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
VisitPointersImpl(host, start, end);
}
V8_INLINE void VisitCodePointer(HeapObject host, CodeObjectSlot slot) final {
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
// TODO(v8:11880): support external code space.
VisitPointer(host, MaybeObjectSlot(slot));
VisitCodePointerImpl(host, slot);
}
V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final;
V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final;
@@ -183,6 +181,10 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
template <typename TSlot>
V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end);
// Similar to VisitPointersImpl() but using code cage base for loading from
// the slot.
V8_INLINE void VisitCodePointerImpl(HeapObject host, CodeObjectSlot slot);
V8_INLINE void VisitDescriptors(DescriptorArray descriptors,
int number_of_own_descriptors);
@@ -26,6 +26,7 @@ class SlotSet;
enum RememberedSetType {
OLD_TO_NEW,
OLD_TO_OLD,
OLD_TO_CODE = V8_EXTERNAL_CODE_SPACE_BOOL ? OLD_TO_OLD + 1 : OLD_TO_OLD,
NUMBER_OF_REMEMBERED_SET_TYPES
};
@@ -115,6 +115,10 @@ MemoryChunk* MemoryChunk::Initialize(BasicMemoryChunk* basic_chunk, Heap* heap,
base::AsAtomicPointer::Release_Store(&chunk->slot_set_[OLD_TO_NEW], nullptr);
base::AsAtomicPointer::Release_Store(&chunk->slot_set_[OLD_TO_OLD], nullptr);
if (V8_EXTERNAL_CODE_SPACE_BOOL) {
base::AsAtomicPointer::Release_Store(&chunk->slot_set_[OLD_TO_CODE],
nullptr);
}
base::AsAtomicPointer::Release_Store(&chunk->sweeping_slot_set_, nullptr);
base::AsAtomicPointer::Release_Store(&chunk->typed_slot_set_[OLD_TO_NEW],
nullptr);
@@ -122,6 +126,10 @@ MemoryChunk* MemoryChunk::Initialize(BasicMemoryChunk* basic_chunk, Heap* heap,
nullptr);
chunk->invalidated_slots_[OLD_TO_NEW] = nullptr;
chunk->invalidated_slots_[OLD_TO_OLD] = nullptr;
if (V8_EXTERNAL_CODE_SPACE_BOOL) {
// Not actually used but initialize anyway for predictability.
chunk->invalidated_slots_[OLD_TO_CODE] = nullptr;
}
chunk->progress_bar_ = 0;
chunk->set_concurrent_sweeping_state(ConcurrentSweepingState::kDone);
chunk->page_protection_change_mutex_ = new base::Mutex();
@@ -224,6 +232,7 @@ void MemoryChunk::ReleaseAllocatedMemoryNeededForWritableChunk() {
ReleaseSlotSet<OLD_TO_NEW>();
ReleaseSweepingSlotSet();
ReleaseSlotSet<OLD_TO_OLD>();
if (V8_EXTERNAL_CODE_SPACE_BOOL) ReleaseSlotSet<OLD_TO_CODE>();
ReleaseTypedSlotSet<OLD_TO_NEW>();
ReleaseTypedSlotSet<OLD_TO_OLD>();
ReleaseInvalidatedSlots<OLD_TO_NEW>();
@@ -243,6 +252,7 @@ void MemoryChunk::ReleaseAllAllocatedMemory() {
template V8_EXPORT_PRIVATE SlotSet* MemoryChunk::AllocateSlotSet<OLD_TO_NEW>();
template V8_EXPORT_PRIVATE SlotSet* MemoryChunk::AllocateSlotSet<OLD_TO_OLD>();
template V8_EXPORT_PRIVATE SlotSet* MemoryChunk::AllocateSlotSet<OLD_TO_CODE>();
template <RememberedSetType type>
SlotSet* MemoryChunk::AllocateSlotSet() {
@@ -267,6 +277,7 @@ SlotSet* MemoryChunk::AllocateSlotSet(SlotSet** slot_set) {
template void MemoryChunk::ReleaseSlotSet<OLD_TO_NEW>();
template void MemoryChunk::ReleaseSlotSet<OLD_TO_OLD>();
template void MemoryChunk::ReleaseSlotSet<OLD_TO_CODE>();
template <RememberedSetType type>
void MemoryChunk::ReleaseSlotSet() {
@@ -278,11 +278,14 @@ class RememberedSet : public AllStatic {
// Clear all old to old slots from the remembered set.
static void ClearAll(Heap* heap) {
STATIC_ASSERT(type == OLD_TO_OLD);
STATIC_ASSERT(type == OLD_TO_OLD || type == OLD_TO_CODE);
OldGenerationMemoryChunkIterator it(heap);
MemoryChunk* chunk;
while ((chunk = it.next()) != nullptr) {
chunk->ReleaseSlotSet<OLD_TO_OLD>();
if (V8_EXTERNAL_CODE_SPACE_BOOL) {
chunk->ReleaseSlotSet<OLD_TO_CODE>();
}
chunk->ReleaseTypedSlotSet<OLD_TO_OLD>();
chunk->ReleaseInvalidatedSlots<OLD_TO_OLD>();
}
@@ -730,12 +730,8 @@ void HeapObject::set_map(Map value) {
#endif
}
Map HeapObject::map(AcquireLoadTag tag) const {
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return HeapObject::map(cage_base, tag);
}
Map HeapObject::map(PtrComprCageBase cage_base, AcquireLoadTag tag) const {
return map_word(cage_base, tag).ToMap();
DEF_ACQUIRE_GETTER(HeapObject, map, Map) {
return map_word(cage_base, kAcquireLoad).ToMap();
}
void HeapObject::set_map(Map value, ReleaseStoreTag tag) {
@@ -781,11 +777,7 @@ ObjectSlot HeapObject::map_slot() const {
return ObjectSlot(MapField::address(*this));
}
MapWord HeapObject::map_word(RelaxedLoadTag tag) const {
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return HeapObject::map_word(cage_base, tag);
}
MapWord HeapObject::map_word(PtrComprCageBase cage_base, RelaxedLoadTag) const {
DEF_RELAXED_GETTER(HeapObject, map_word, MapWord) {
return MapField::Relaxed_Load_Map_Word(cage_base, *this);
}
@@ -793,11 +785,7 @@ void HeapObject::set_map_word(MapWord map_word, RelaxedStoreTag) {
MapField::Relaxed_Store_Map_Word(*this, map_word);
}
MapWord HeapObject::map_word(AcquireLoadTag tag) const {
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return HeapObject::map_word(cage_base, tag);
}
MapWord HeapObject::map_word(PtrComprCageBase cage_base, AcquireLoadTag) const {
DEF_ACQUIRE_GETTER(HeapObject, map_word, MapWord) {
return MapField::Acquire_Load_No_Unpack(cage_base, *this);
}
@@ -733,11 +733,10 @@ class IndexedReferencesExtractor : public ObjectVisitor {
VisitPointers(host, MaybeObjectSlot(start), MaybeObjectSlot(end));
}
void VisitMapPointer(HeapObject object) override {
if (generator_->visited_fields_[0]) {
generator_->visited_fields_[0] = false;
} else {
VisitHeapObjectImpl(object.map(), 0);
}
// TODO(v8:11880): support external code space (here object could be Code,
// so the V8 heap cage_base must be used here).
PtrComprCageBase cage_base = GetPtrComprCageBase(object);
VisitSlotImpl(cage_base, object.map_slot());
}
void VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) override {
@@ -745,22 +744,17 @@ class IndexedReferencesExtractor : public ObjectVisitor {
// all the slots must point inside the object.
CHECK_LE(parent_start_, start);
CHECK_LE(end, parent_end_);
for (MaybeObjectSlot p = start; p < end; ++p) {
int field_index = static_cast<int>(p - parent_start_);
if (generator_->visited_fields_[field_index]) {
generator_->visited_fields_[field_index] = false;
continue;
}
HeapObject heap_object;
if ((*p)->GetHeapObject(&heap_object)) {
VisitHeapObjectImpl(heap_object, field_index);
}
PtrComprCageBase cage_base = GetPtrComprCageBase(host);
for (MaybeObjectSlot slot = start; slot < end; ++slot) {
VisitSlotImpl(cage_base, slot);
}
}
void VisitCodePointer(HeapObject host, CodeObjectSlot slot) override {
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
VisitPointers(host, MaybeObjectSlot(slot), MaybeObjectSlot(slot + 1));
// TODO(v8:11880): support external code space.
PtrComprCageBase code_cage_base = GetPtrComprCageBase(host);
VisitSlotImpl(code_cage_base, slot);
}
void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
@@ -773,6 +767,19 @@ class IndexedReferencesExtractor : public ObjectVisitor {
}
private:
template <typename TSlot>
V8_INLINE void VisitSlotImpl(PtrComprCageBase cage_base, TSlot slot) {
int field_index = static_cast<int>(MaybeObjectSlot(slot) - parent_start_);
if (generator_->visited_fields_[field_index]) {
generator_->visited_fields_[field_index] = false;
} else {
HeapObject heap_object;
if (slot.load(cage_base).GetHeapObject(&heap_object)) {
VisitHeapObjectImpl(heap_object, field_index);
}
}
}
V8_INLINE void VisitHeapObjectImpl(HeapObject heap_object, int field_index) {
DCHECK_LE(-1, field_index);
// The last parameter {field_offset} is only used to check some well-known
@@ -893,7 +893,21 @@ void Serializer::ObjectSerializer::VisitPointers(HeapObject host,
void Serializer::ObjectSerializer::VisitCodePointer(HeapObject host,
CodeObjectSlot slot) {
CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
VisitPointers(host, ObjectSlot(slot), ObjectSlot(slot + 1));
// A version of VisitPointers() customized for CodeObjectSlot.
HandleScope scope(isolate());
DisallowGarbageCollection no_gc;
// TODO(v8:11880): support external code space.
PtrComprCageBase code_cage_base = GetPtrComprCageBase(host);
Object contents = slot.load(code_cage_base);
DCHECK(HAS_STRONG_HEAP_OBJECT_TAG(contents.ptr()));
DCHECK(contents.IsCode());
Handle<HeapObject> obj = handle(HeapObject::cast(contents), isolate());
if (!serializer_->SerializePendingObject(obj)) {
serializer_->SerializeObject(obj);
}
bytes_processed_so_far_ += kTaggedSize;
}
void Serializer::ObjectSerializer::OutputExternalReference(Address target,