Commit 7590b1cd authored by Omer Katz, committed by V8 LUCI CQ

[heap] Fix OLD_TO_SHARED remembered set and tests

Sufficiently full pages in new space are promoted as is to old space. If
a string is allocated on such a page, it won't be promoted to the shared
heap. The string can later be promoted by the next full GC, but then it
is promoted from old space, not new space, which was not supported.

Bug: v8:12612
Change-Id: I6133e13bec9ba3110b2b9dbfb4dcef47bde25e90
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3865162
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82906}
parent 960bac8d
...@@ -1757,8 +1757,6 @@ class EvacuateVisitorBase : public HeapObjectVisitor { ...@@ -1757,8 +1757,6 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
AllocationResult allocation; AllocationResult allocation;
if (ShouldPromoteIntoSharedHeap(map)) { if (ShouldPromoteIntoSharedHeap(map)) {
DCHECK_EQ(target_space, OLD_SPACE); DCHECK_EQ(target_space, OLD_SPACE);
// TODO(v8:12612): Implement promotion from new space to shared heap.
DCHECK_IMPLIES(!FLAG_minor_mc, Heap::InYoungGeneration(object));
DCHECK_NOT_NULL(shared_old_allocator_); DCHECK_NOT_NULL(shared_old_allocator_);
allocation = shared_old_allocator_->AllocateRaw(size, alignment, allocation = shared_old_allocator_->AllocateRaw(size, alignment,
AllocationOrigin::kGC); AllocationOrigin::kGC);
...@@ -4712,11 +4710,11 @@ class RememberedSetUpdatingItem : public UpdatingItem { ...@@ -4712,11 +4710,11 @@ class RememberedSetUpdatingItem : public UpdatingItem {
private: private:
template <typename TSlot> template <typename TSlot>
inline void CheckOldToNewSlotForSharedUntyped(MemoryChunk* chunk, inline void CheckSlotForOldToSharedUntyped(PtrComprCageBase cage_base,
TSlot slot) { MemoryChunk* chunk, TSlot slot) {
HeapObject heap_object; HeapObject heap_object;
if (!(*slot).GetHeapObject(&heap_object)) { if (!slot.load(cage_base).GetHeapObject(&heap_object)) {
return; return;
} }
...@@ -4726,9 +4724,8 @@ class RememberedSetUpdatingItem : public UpdatingItem { ...@@ -4726,9 +4724,8 @@ class RememberedSetUpdatingItem : public UpdatingItem {
} }
} }
inline void CheckOldToNewSlotForSharedTyped(MemoryChunk* chunk, inline void CheckSlotForOldToSharedTyped(MemoryChunk* chunk,
SlotType slot_type, SlotType slot_type, Address addr) {
Address addr) {
HeapObject heap_object = HeapObject heap_object =
UpdateTypedSlotHelper::GetTargetObject(chunk->heap(), slot_type, addr); UpdateTypedSlotHelper::GetTargetObject(chunk->heap(), slot_type, addr);
...@@ -4798,6 +4795,8 @@ class RememberedSetUpdatingItem : public UpdatingItem { ...@@ -4798,6 +4795,8 @@ class RememberedSetUpdatingItem : public UpdatingItem {
} }
void UpdateUntypedPointers() { void UpdateUntypedPointers() {
const bool has_shared_isolate = this->heap_->isolate()->shared_isolate();
const PtrComprCageBase cage_base = heap_->isolate();
if (chunk_->slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() != nullptr) { if (chunk_->slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() != nullptr) {
// Marking bits are cleared already when the page is already swept. This // Marking bits are cleared already when the page is already swept. This
// is fine since in that case the sweeper has already removed dead invalid // is fine since in that case the sweeper has already removed dead invalid
...@@ -4809,16 +4808,15 @@ class RememberedSetUpdatingItem : public UpdatingItem { ...@@ -4809,16 +4808,15 @@ class RememberedSetUpdatingItem : public UpdatingItem {
: InvalidatedSlotsFilter::LivenessCheck::kNo; : InvalidatedSlotsFilter::LivenessCheck::kNo;
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter filter =
InvalidatedSlotsFilter::OldToNew(chunk_, liveness_check); InvalidatedSlotsFilter::OldToNew(chunk_, liveness_check);
const bool has_shared_isolate = this->heap_->isolate()->shared_isolate();
int slots = RememberedSet<OLD_TO_NEW>::Iterate( int slots = RememberedSet<OLD_TO_NEW>::Iterate(
chunk_, chunk_,
[this, &filter, has_shared_isolate](MaybeObjectSlot slot) { [this, &filter, has_shared_isolate, cage_base](MaybeObjectSlot slot) {
if (!filter.IsValid(slot.address())) return REMOVE_SLOT; if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
SlotCallbackResult result = CheckAndUpdateOldToNewSlot(slot); SlotCallbackResult result = CheckAndUpdateOldToNewSlot(slot);
// A new space string might have been promoted into the shared heap // A new space string might have been promoted into the shared heap
// during GC. // during GC.
if (has_shared_isolate) { if (has_shared_isolate) {
CheckOldToNewSlotForSharedUntyped(chunk_, slot); CheckSlotForOldToSharedUntyped(cage_base, chunk_, slot);
} }
return result; return result;
}, },
...@@ -4841,12 +4839,16 @@ class RememberedSetUpdatingItem : public UpdatingItem { ...@@ -4841,12 +4839,16 @@ class RememberedSetUpdatingItem : public UpdatingItem {
(chunk_->slot_set<OLD_TO_OLD, AccessMode::NON_ATOMIC>() != nullptr)) { (chunk_->slot_set<OLD_TO_OLD, AccessMode::NON_ATOMIC>() != nullptr)) {
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld( InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(
chunk_, InvalidatedSlotsFilter::LivenessCheck::kNo); chunk_, InvalidatedSlotsFilter::LivenessCheck::kNo);
PtrComprCageBase cage_base = heap_->isolate();
RememberedSet<OLD_TO_OLD>::Iterate( RememberedSet<OLD_TO_OLD>::Iterate(
chunk_, chunk_,
[&filter, cage_base](MaybeObjectSlot slot) { [this, has_shared_isolate, &filter, cage_base](MaybeObjectSlot slot) {
if (filter.IsValid(slot.address())) { if (filter.IsValid(slot.address())) {
UpdateSlot<AccessMode::NON_ATOMIC>(cage_base, slot); UpdateSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
// A string might have been promoted into the shared heap during
// GC.
if (has_shared_isolate) {
CheckSlotForOldToSharedUntyped(cage_base, chunk_, slot);
}
} }
// Always keep slot since all slots are dropped at once after // Always keep slot since all slots are dropped at once after
// iteration. // iteration.
...@@ -4909,9 +4911,9 @@ class RememberedSetUpdatingItem : public UpdatingItem { ...@@ -4909,9 +4911,9 @@ class RememberedSetUpdatingItem : public UpdatingItem {
} }
void UpdateTypedPointers() { void UpdateTypedPointers() {
const bool has_shared_isolate = heap_->isolate()->shared_isolate();
if (chunk_->typed_slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() != if (chunk_->typed_slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() !=
nullptr) { nullptr) {
const bool has_shared_isolate = heap_->isolate()->shared_isolate();
CHECK_NE(chunk_->owner(), heap_->map_space()); CHECK_NE(chunk_->owner(), heap_->map_space());
const auto check_and_update_old_to_new_slot_fn = const auto check_and_update_old_to_new_slot_fn =
[this](FullMaybeObjectSlot slot) { [this](FullMaybeObjectSlot slot) {
...@@ -4926,7 +4928,7 @@ class RememberedSetUpdatingItem : public UpdatingItem { ...@@ -4926,7 +4928,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
// A new space string might have been promoted into the shared heap // A new space string might have been promoted into the shared heap
// during GC. // during GC.
if (has_shared_isolate) { if (has_shared_isolate) {
CheckOldToNewSlotForSharedTyped(chunk_, slot_type, slot); CheckSlotForOldToSharedTyped(chunk_, slot_type, slot);
} }
return result; return result;
}); });
...@@ -4935,19 +4937,24 @@ class RememberedSetUpdatingItem : public UpdatingItem { ...@@ -4935,19 +4937,24 @@ class RememberedSetUpdatingItem : public UpdatingItem {
(chunk_->typed_slot_set<OLD_TO_OLD, AccessMode::NON_ATOMIC>() != (chunk_->typed_slot_set<OLD_TO_OLD, AccessMode::NON_ATOMIC>() !=
nullptr)) { nullptr)) {
CHECK_NE(chunk_->owner(), heap_->map_space()); CHECK_NE(chunk_->owner(), heap_->map_space());
RememberedSet<OLD_TO_OLD>::IterateTyped(chunk_, [=](SlotType slot_type, RememberedSet<OLD_TO_OLD>::IterateTyped(
Address slot) { chunk_, [this, has_shared_isolate](SlotType slot_type, Address slot) {
// Using UpdateStrongSlot is OK here, because there are no weak // Using UpdateStrongSlot is OK here, because there are no weak
// typed slots. // typed slots.
PtrComprCageBase cage_base = heap_->isolate(); PtrComprCageBase cage_base = heap_->isolate();
return UpdateTypedSlotHelper::UpdateTypedSlot( SlotCallbackResult result = UpdateTypedSlotHelper::UpdateTypedSlot(
heap_, slot_type, slot, [cage_base](FullMaybeObjectSlot slot) { heap_, slot_type, slot, [cage_base](FullMaybeObjectSlot slot) {
UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot); UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
// Always keep slot since all slots are dropped at once after // Always keep slot since all slots are dropped at once after
// iteration. // iteration.
return KEEP_SLOT; return KEEP_SLOT;
}); });
}); // A string might have been promoted into the shared heap during GC.
if (has_shared_isolate) {
CheckSlotForOldToSharedTyped(chunk_, slot_type, slot);
}
return result;
});
chunk_->ReleaseTypedSlotSet<OLD_TO_OLD>(); chunk_->ReleaseTypedSlotSet<OLD_TO_OLD>();
} }
} }
......
...@@ -763,7 +763,7 @@ class V8_EXPORT_PRIVATE PagedNewSpace final : public NewSpace { ...@@ -763,7 +763,7 @@ class V8_EXPORT_PRIVATE PagedNewSpace final : public NewSpace {
return paged_space_.GetObjectIterator(heap); return paged_space_.GetObjectIterator(heap);
} }
bool ShouldBePromoted(Address address) const final { return false; } bool ShouldBePromoted(Address address) const final { return true; }
void EvacuatePrologue() final { paged_space_.EvacuatePrologue(); } void EvacuatePrologue() final { paged_space_.EvacuatePrologue(); }
void EvacuateEpilogue() final { paged_space_.EvacuateEpilogue(); } void EvacuateEpilogue() final { paged_space_.EvacuateEpilogue(); }
......
...@@ -740,6 +740,11 @@ UNINITIALIZED_TEST(PromotionScavenge) { ...@@ -740,6 +740,11 @@ UNINITIALIZED_TEST(PromotionScavenge) {
} }
UNINITIALIZED_TEST(PromotionScavengeOldToShared) { UNINITIALIZED_TEST(PromotionScavengeOldToShared) {
if (FLAG_minor_mc) {
// Promoting from new space directly to shared heap is not implemented in
// MinorMC.
return;
}
if (FLAG_single_generation) return; if (FLAG_single_generation) return;
if (!ReadOnlyHeap::IsReadOnlySpaceShared()) return; if (!ReadOnlyHeap::IsReadOnlySpaceShared()) return;
if (!COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL) return; if (!COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL) return;
...@@ -789,7 +794,7 @@ UNINITIALIZED_TEST(PromotionScavengeOldToShared) { ...@@ -789,7 +794,7 @@ UNINITIALIZED_TEST(PromotionScavengeOldToShared) {
} }
} }
UNINITIALIZED_TEST(PromotionMarkCompactOldToShared) { UNINITIALIZED_TEST(PromotionMarkCompactNewToShared) {
if (FLAG_single_generation) return; if (FLAG_single_generation) return;
if (!ReadOnlyHeap::IsReadOnlySpaceShared()) return; if (!ReadOnlyHeap::IsReadOnlySpaceShared()) return;
if (!COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL) return; if (!COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL) return;
...@@ -797,6 +802,7 @@ UNINITIALIZED_TEST(PromotionMarkCompactOldToShared) { ...@@ -797,6 +802,7 @@ UNINITIALIZED_TEST(PromotionMarkCompactOldToShared) {
FLAG_shared_string_table = true; FLAG_shared_string_table = true;
FLAG_manual_evacuation_candidates_selection = true; FLAG_manual_evacuation_candidates_selection = true;
FLAG_page_promotion = false;
MultiClientIsolateTest test; MultiClientIsolateTest test;
Isolate* i_isolate = test.i_main_isolate(); Isolate* i_isolate = test.i_main_isolate();
...@@ -838,6 +844,63 @@ UNINITIALIZED_TEST(PromotionMarkCompactOldToShared) { ...@@ -838,6 +844,63 @@ UNINITIALIZED_TEST(PromotionMarkCompactOldToShared) {
} }
} }
// Regression test for v8:12612: a string allocated on a sufficiently full
// new-space page is promoted as-is to old space by page promotion, so the
// next full GC must promote it to the shared heap *from old space* and
// record an OLD_TO_SHARED remembered-set slot for references pointing at it.
UNINITIALIZED_TEST(PromotionMarkCompactOldToShared) {
  if (!ReadOnlyHeap::IsReadOnlySpaceShared()) return;
  if (!COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL) return;
  if (FLAG_stress_concurrent_allocation) return;
  // This scenario relies on whole-page promotion; skip if it is disabled.
  if (!FLAG_page_promotion) return;

  FLAG_shared_string_table = true;
  // Needed so the test can explicitly mark the string's page for evacuation.
  FLAG_manual_evacuation_candidates_selection = true;

  MultiClientIsolateTest test;
  Isolate* i_isolate = test.i_main_isolate();
  Factory* factory = i_isolate->factory();
  Heap* heap = i_isolate->heap();
  // Suppress unsolicited GCs so the two CollectGarbage calls below are the
  // only collections that run.
  ManualGCScope manual_gc(i_isolate);

  const char raw_one_byte[] = "foo";

  {
    HandleScope scope(i_isolate);

    // Old-space holder whose element slot will later reference the string.
    Handle<FixedArray> old_object =
        factory->NewFixedArray(1, AllocationType::kOld);
    MemoryChunk* old_object_chunk = MemoryChunk::FromHeapObject(*old_object);
    CHECK(!old_object_chunk->InYoungGeneration());

    // Young-allocated string that is eligible for in-place internalization
    // (and therefore for promotion into the shared heap).
    Handle<String> one_byte_seq = factory->NewStringFromAsciiChecked(
        raw_one_byte, AllocationType::kYoung);
    CHECK(String::IsInPlaceInternalizable(*one_byte_seq));
    CHECK(MemoryChunk::FromHeapObject(*one_byte_seq)->InYoungGeneration());

    // Fill the page and do a full GC. Page promotion should kick in and promote
    // the page as is to old space.
    heap::FillCurrentPage(heap->new_space());
    heap->CollectGarbage(OLD_SPACE, GarbageCollectionReason::kTesting);

    // Make sure 'one_byte_seq' is in old space.
    CHECK(!MemoryChunk::FromHeapObject(*one_byte_seq)->InYoungGeneration());
    CHECK(heap->Contains(*one_byte_seq));

    old_object->set(0, *one_byte_seq);
    ObjectSlot slot = old_object->GetFirstElementAddress();
    // Both objects are in old space now, so no OLD_TO_NEW slot may exist for
    // this reference.
    CHECK(
        !RememberedSet<OLD_TO_NEW>::Contains(old_object_chunk, slot.address()));

    // Force evacuation of the string's page so the next full GC relocates it.
    heap::ForceEvacuationCandidate(Page::FromHeapObject(*one_byte_seq));
    heap->CollectGarbage(OLD_SPACE, GarbageCollectionReason::kTesting);

    // In-place-internalizable strings are promoted into the shared heap when
    // sharing.
    CHECK(!heap->Contains(*one_byte_seq));
    CHECK(heap->SharedHeapContains(*one_byte_seq));

    // Since the GC promoted that string into shared heap, it also needs to
    // create an OLD_TO_SHARED slot.
    CHECK(RememberedSet<OLD_TO_SHARED>::Contains(old_object_chunk,
                                                 slot.address()));
  }
}
UNINITIALIZED_TEST(PagePromotionRecordingOldToShared) { UNINITIALIZED_TEST(PagePromotionRecordingOldToShared) {
if (FLAG_single_generation) return; if (FLAG_single_generation) return;
if (!ReadOnlyHeap::IsReadOnlySpaceShared()) return; if (!ReadOnlyHeap::IsReadOnlySpaceShared()) return;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment