Commit 4dd04c0b authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Clean up invalidated OLD_TO_SHARED slots

With this CL OLD_TO_SHARED slots aren't removed at the end of full GC
anymore. In order to allow for this, invalidated slots need to be
filtered out when iterating the OLD_TO_SHARED remembered set.

* When invalidating slots in an object, that object also needs to be
  recorded for OLD_TO_SHARED.
* The sweeper has to remove invalidated objects in free memory when
  sweeping during a full GC.
* OLD_TO_SHARED slots need to be removed in the evacuated start of
  a page when evacuation fails.
* While local GCs don't need OLD_TO_SHARED, slots need to be filtered
  in order to be able to delete the set of invalidated objects during
  a GC.

Bug: v8:11708
Change-Id: I594307289a797bc0d68edf6793b914805d1285df
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3584113
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80018}
parent a7fcaa5a
...@@ -22,6 +22,11 @@ InvalidatedSlotsFilter InvalidatedSlotsFilter::OldToNew(MemoryChunk* chunk) { ...@@ -22,6 +22,11 @@ InvalidatedSlotsFilter InvalidatedSlotsFilter::OldToNew(MemoryChunk* chunk) {
OLD_TO_NEW); OLD_TO_NEW);
} }
// Creates a filter over the OLD_TO_SHARED invalidated-slots set of |chunk|,
// mirroring the OldToOld/OldToNew factory methods above.
InvalidatedSlotsFilter InvalidatedSlotsFilter::OldToShared(MemoryChunk* chunk) {
  InvalidatedSlots* invalidated_slots =
      chunk->invalidated_slots<OLD_TO_SHARED>();
  return InvalidatedSlotsFilter(chunk, invalidated_slots, OLD_TO_SHARED);
}
InvalidatedSlotsFilter::InvalidatedSlotsFilter( InvalidatedSlotsFilter::InvalidatedSlotsFilter(
MemoryChunk* chunk, InvalidatedSlots* invalidated_slots, MemoryChunk* chunk, InvalidatedSlots* invalidated_slots,
RememberedSetType remembered_set_type) { RememberedSetType remembered_set_type) {
...@@ -48,6 +53,12 @@ InvalidatedSlotsCleanup InvalidatedSlotsCleanup::OldToNew(MemoryChunk* chunk) { ...@@ -48,6 +53,12 @@ InvalidatedSlotsCleanup InvalidatedSlotsCleanup::OldToNew(MemoryChunk* chunk) {
return InvalidatedSlotsCleanup(chunk, chunk->invalidated_slots<OLD_TO_NEW>()); return InvalidatedSlotsCleanup(chunk, chunk->invalidated_slots<OLD_TO_NEW>());
} }
// Creates a cleanup helper for the OLD_TO_SHARED invalidated-slots set of
// |chunk|, analogous to InvalidatedSlotsCleanup::OldToNew above.
InvalidatedSlotsCleanup InvalidatedSlotsCleanup::OldToShared(
    MemoryChunk* chunk) {
  InvalidatedSlots* invalidated_slots =
      chunk->invalidated_slots<OLD_TO_SHARED>();
  return InvalidatedSlotsCleanup(chunk, invalidated_slots);
}
InvalidatedSlotsCleanup InvalidatedSlotsCleanup::NoCleanup(MemoryChunk* chunk) { InvalidatedSlotsCleanup InvalidatedSlotsCleanup::NoCleanup(MemoryChunk* chunk) {
return InvalidatedSlotsCleanup(chunk, nullptr); return InvalidatedSlotsCleanup(chunk, nullptr);
} }
......
...@@ -33,6 +33,7 @@ class V8_EXPORT_PRIVATE InvalidatedSlotsFilter { ...@@ -33,6 +33,7 @@ class V8_EXPORT_PRIVATE InvalidatedSlotsFilter {
public: public:
static InvalidatedSlotsFilter OldToOld(MemoryChunk* chunk); static InvalidatedSlotsFilter OldToOld(MemoryChunk* chunk);
static InvalidatedSlotsFilter OldToNew(MemoryChunk* chunk); static InvalidatedSlotsFilter OldToNew(MemoryChunk* chunk);
static InvalidatedSlotsFilter OldToShared(MemoryChunk* chunk);
inline bool IsValid(Address slot); inline bool IsValid(Address slot);
...@@ -60,6 +61,7 @@ class V8_EXPORT_PRIVATE InvalidatedSlotsFilter { ...@@ -60,6 +61,7 @@ class V8_EXPORT_PRIVATE InvalidatedSlotsFilter {
class V8_EXPORT_PRIVATE InvalidatedSlotsCleanup { class V8_EXPORT_PRIVATE InvalidatedSlotsCleanup {
public: public:
static InvalidatedSlotsCleanup OldToNew(MemoryChunk* chunk); static InvalidatedSlotsCleanup OldToNew(MemoryChunk* chunk);
static InvalidatedSlotsCleanup OldToShared(MemoryChunk* chunk);
static InvalidatedSlotsCleanup NoCleanup(MemoryChunk* chunk); static InvalidatedSlotsCleanup NoCleanup(MemoryChunk* chunk);
explicit InvalidatedSlotsCleanup(MemoryChunk* chunk, explicit InvalidatedSlotsCleanup(MemoryChunk* chunk,
......
...@@ -3514,7 +3514,7 @@ void MarkCompactCollector::EvacuateEpilogue() { ...@@ -3514,7 +3514,7 @@ void MarkCompactCollector::EvacuateEpilogue() {
heap()->new_lo_space()->FreeUnmarkedObjects(); heap()->new_lo_space()->FreeUnmarkedObjects();
} }
// Old space. Deallocate evacuated candidate pages. // Old generation. Deallocate evacuated candidate pages.
ReleaseEvacuationCandidates(); ReleaseEvacuationCandidates();
// Give pages that are queued to be freed back to the OS. // Give pages that are queued to be freed back to the OS.
...@@ -3528,10 +3528,16 @@ void MarkCompactCollector::EvacuateEpilogue() { ...@@ -3528,10 +3528,16 @@ void MarkCompactCollector::EvacuateEpilogue() {
// Old-to-old slot sets must be empty after evacuation. // Old-to-old slot sets must be empty after evacuation.
DCHECK_NULL((chunk->slot_set<OLD_TO_OLD, AccessMode::ATOMIC>())); DCHECK_NULL((chunk->slot_set<OLD_TO_OLD, AccessMode::ATOMIC>()));
DCHECK_NULL((chunk->slot_set<OLD_TO_SHARED, AccessMode::NON_ATOMIC>()));
DCHECK_NULL((chunk->typed_slot_set<OLD_TO_OLD, AccessMode::ATOMIC>())); DCHECK_NULL((chunk->typed_slot_set<OLD_TO_OLD, AccessMode::ATOMIC>()));
// Old-to-new slot sets must be empty after evacuation.
DCHECK_NULL((chunk->slot_set<OLD_TO_NEW, AccessMode::ATOMIC>()));
DCHECK_NULL((chunk->typed_slot_set<OLD_TO_NEW, AccessMode::ATOMIC>()));
// GCs need to filter invalidated slots.
DCHECK_NULL(chunk->invalidated_slots<OLD_TO_OLD>()); DCHECK_NULL(chunk->invalidated_slots<OLD_TO_OLD>());
DCHECK_NULL(chunk->invalidated_slots<OLD_TO_NEW>()); DCHECK_NULL(chunk->invalidated_slots<OLD_TO_NEW>());
DCHECK_NULL(chunk->invalidated_slots<OLD_TO_SHARED>());
} }
#endif #endif
} }
...@@ -4452,6 +4458,21 @@ class RememberedSetUpdatingItem : public UpdatingItem { ...@@ -4452,6 +4458,21 @@ class RememberedSetUpdatingItem : public UpdatingItem {
// processsed, but since there are no invalidated OLD_TO_CODE slots, // processsed, but since there are no invalidated OLD_TO_CODE slots,
// there's nothing to clear. // there's nothing to clear.
} }
if (updating_mode_ == RememberedSetUpdatingMode::ALL) {
if (chunk_->slot_set<OLD_TO_SHARED, AccessMode::NON_ATOMIC>()) {
// Client GCs need to remove invalidated OLD_TO_SHARED slots.
DCHECK(!heap_->IsShared());
InvalidatedSlotsFilter filter =
InvalidatedSlotsFilter::OldToShared(chunk_);
RememberedSet<OLD_TO_SHARED>::Iterate(
chunk_,
[&filter](MaybeObjectSlot slot) {
return filter.IsValid(slot.address()) ? KEEP_SLOT : REMOVE_SLOT;
},
SlotSet::FREE_EMPTY_BUCKETS);
}
chunk_->ReleaseInvalidatedSlots<OLD_TO_SHARED>();
}
} }
void UpdateTypedPointers() { void UpdateTypedPointers() {
...@@ -4517,13 +4538,20 @@ int MarkCompactCollectorBase::CollectRememberedSetUpdatingItems( ...@@ -4517,13 +4538,20 @@ int MarkCompactCollectorBase::CollectRememberedSetUpdatingItems(
const bool contains_old_to_new_slots = const bool contains_old_to_new_slots =
chunk->slot_set<OLD_TO_NEW>() != nullptr || chunk->slot_set<OLD_TO_NEW>() != nullptr ||
chunk->typed_slot_set<OLD_TO_NEW>() != nullptr; chunk->typed_slot_set<OLD_TO_NEW>() != nullptr;
const bool contains_old_to_shared_slots =
chunk->slot_set<OLD_TO_SHARED>() != nullptr ||
chunk->typed_slot_set<OLD_TO_SHARED>() != nullptr;
const bool contains_old_to_old_invalidated_slots = const bool contains_old_to_old_invalidated_slots =
chunk->invalidated_slots<OLD_TO_OLD>() != nullptr; chunk->invalidated_slots<OLD_TO_OLD>() != nullptr;
const bool contains_old_to_new_invalidated_slots = const bool contains_old_to_new_invalidated_slots =
chunk->invalidated_slots<OLD_TO_NEW>() != nullptr; chunk->invalidated_slots<OLD_TO_NEW>() != nullptr;
const bool contains_old_to_shared_invalidated_slots =
chunk->invalidated_slots<OLD_TO_SHARED>() != nullptr;
if (!contains_old_to_new_slots && !contains_old_to_old_slots && if (!contains_old_to_new_slots && !contains_old_to_old_slots &&
!contains_old_to_old_invalidated_slots && !contains_old_to_old_invalidated_slots &&
!contains_old_to_new_invalidated_slots && !contains_old_to_code_slots) !contains_old_to_new_invalidated_slots && !contains_old_to_code_slots &&
!contains_old_to_shared_slots &&
!contains_old_to_shared_invalidated_slots)
continue; continue;
if (mode == RememberedSetUpdatingMode::ALL || contains_old_to_new_slots || if (mode == RememberedSetUpdatingMode::ALL || contains_old_to_new_slots ||
contains_old_to_old_invalidated_slots || contains_old_to_old_invalidated_slots ||
...@@ -4668,14 +4696,16 @@ void MarkCompactCollector::UpdatePointersInClientHeap(Isolate* client) { ...@@ -4668,14 +4696,16 @@ void MarkCompactCollector::UpdatePointersInClientHeap(Isolate* client) {
MemoryChunk* chunk = chunk_iterator.Next(); MemoryChunk* chunk = chunk_iterator.Next();
CodePageMemoryModificationScope unprotect_code_page(chunk); CodePageMemoryModificationScope unprotect_code_page(chunk);
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToShared(chunk);
RememberedSet<OLD_TO_SHARED>::Iterate( RememberedSet<OLD_TO_SHARED>::Iterate(
chunk, chunk,
[cage_base](MaybeObjectSlot slot) { [cage_base, &filter](MaybeObjectSlot slot) {
if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
return UpdateSlot<AccessMode::NON_ATOMIC>(cage_base, slot); return UpdateSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
}, },
SlotSet::KEEP_EMPTY_BUCKETS); SlotSet::FREE_EMPTY_BUCKETS);
chunk->ReleaseSlotSet<OLD_TO_SHARED>(); chunk->ReleaseInvalidatedSlots<OLD_TO_SHARED>();
RememberedSet<OLD_TO_SHARED>::IterateTyped( RememberedSet<OLD_TO_SHARED>::IterateTyped(
chunk, [this](SlotType slot_type, Address slot) { chunk, [this](SlotType slot_type, Address slot) {
...@@ -4688,8 +4718,6 @@ void MarkCompactCollector::UpdatePointersInClientHeap(Isolate* client) { ...@@ -4688,8 +4718,6 @@ void MarkCompactCollector::UpdatePointersInClientHeap(Isolate* client) {
slot); slot);
}); });
}); });
chunk->ReleaseTypedSlotSet<OLD_TO_SHARED>();
} }
#ifdef VERIFY_HEAP #ifdef VERIFY_HEAP
...@@ -4736,11 +4764,20 @@ void ReRecordPage( ...@@ -4736,11 +4764,20 @@ void ReRecordPage(
RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, page->address(), RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, page->address(),
failed_start); failed_start);
RememberedSet<OLD_TO_SHARED>::RemoveRange(page, page->address(), failed_start,
SlotSet::FREE_EMPTY_BUCKETS);
RememberedSet<OLD_TO_SHARED>::RemoveRangeTyped(page, page->address(),
failed_start);
// Remove invalidated slots. // Remove invalidated slots.
if (failed_start > page->area_start()) { if (failed_start > page->area_start()) {
InvalidatedSlotsCleanup old_to_new_cleanup = InvalidatedSlotsCleanup old_to_new_cleanup =
InvalidatedSlotsCleanup::OldToNew(page); InvalidatedSlotsCleanup::OldToNew(page);
old_to_new_cleanup.Free(page->area_start(), failed_start); old_to_new_cleanup.Free(page->area_start(), failed_start);
InvalidatedSlotsCleanup old_to_shared_cleanup =
InvalidatedSlotsCleanup::OldToShared(page);
old_to_shared_cleanup.Free(page->area_start(), failed_start);
} }
// Recompute live bytes. // Recompute live bytes.
......
...@@ -4,6 +4,7 @@ ...@@ -4,6 +4,7 @@
#include "src/heap/memory-chunk.h" #include "src/heap/memory-chunk.h"
#include "src/base/logging.h"
#include "src/base/platform/platform.h" #include "src/base/platform/platform.h"
#include "src/base/platform/wrappers.h" #include "src/base/platform/wrappers.h"
#include "src/common/globals.h" #include "src/common/globals.h"
...@@ -141,6 +142,7 @@ MemoryChunk::MemoryChunk(Heap* heap, BaseSpace* space, size_t chunk_size, ...@@ -141,6 +142,7 @@ MemoryChunk::MemoryChunk(Heap* heap, BaseSpace* space, size_t chunk_size,
// Not actually used but initialize anyway for predictability. // Not actually used but initialize anyway for predictability.
invalidated_slots_[OLD_TO_CODE] = nullptr; invalidated_slots_[OLD_TO_CODE] = nullptr;
} }
invalidated_slots_[OLD_TO_SHARED] = nullptr;
progress_bar_.Initialize(); progress_bar_.Initialize();
set_concurrent_sweeping_state(ConcurrentSweepingState::kDone); set_concurrent_sweeping_state(ConcurrentSweepingState::kDone);
page_protection_change_mutex_ = new base::Mutex(); page_protection_change_mutex_ = new base::Mutex();
...@@ -245,10 +247,13 @@ void MemoryChunk::ReleaseAllocatedMemoryNeededForWritableChunk() { ...@@ -245,10 +247,13 @@ void MemoryChunk::ReleaseAllocatedMemoryNeededForWritableChunk() {
ReleaseSlotSet<OLD_TO_NEW>(); ReleaseSlotSet<OLD_TO_NEW>();
ReleaseSlotSet<OLD_TO_OLD>(); ReleaseSlotSet<OLD_TO_OLD>();
if (V8_EXTERNAL_CODE_SPACE_BOOL) ReleaseSlotSet<OLD_TO_CODE>(); if (V8_EXTERNAL_CODE_SPACE_BOOL) ReleaseSlotSet<OLD_TO_CODE>();
ReleaseSlotSet<OLD_TO_SHARED>();
ReleaseTypedSlotSet<OLD_TO_NEW>(); ReleaseTypedSlotSet<OLD_TO_NEW>();
ReleaseTypedSlotSet<OLD_TO_OLD>(); ReleaseTypedSlotSet<OLD_TO_OLD>();
ReleaseTypedSlotSet<OLD_TO_SHARED>();
ReleaseInvalidatedSlots<OLD_TO_NEW>(); ReleaseInvalidatedSlots<OLD_TO_NEW>();
ReleaseInvalidatedSlots<OLD_TO_OLD>(); ReleaseInvalidatedSlots<OLD_TO_OLD>();
ReleaseInvalidatedSlots<OLD_TO_SHARED>();
if (young_generation_bitmap_ != nullptr) ReleaseYoungGenerationBitmap(); if (young_generation_bitmap_ != nullptr) ReleaseYoungGenerationBitmap();
...@@ -348,6 +353,7 @@ InvalidatedSlots* MemoryChunk::AllocateInvalidatedSlots() { ...@@ -348,6 +353,7 @@ InvalidatedSlots* MemoryChunk::AllocateInvalidatedSlots() {
template void MemoryChunk::ReleaseInvalidatedSlots<OLD_TO_NEW>(); template void MemoryChunk::ReleaseInvalidatedSlots<OLD_TO_NEW>();
template void MemoryChunk::ReleaseInvalidatedSlots<OLD_TO_OLD>(); template void MemoryChunk::ReleaseInvalidatedSlots<OLD_TO_OLD>();
template void MemoryChunk::ReleaseInvalidatedSlots<OLD_TO_SHARED>();
template <RememberedSetType type> template <RememberedSetType type>
void MemoryChunk::ReleaseInvalidatedSlots() { void MemoryChunk::ReleaseInvalidatedSlots() {
...@@ -361,15 +367,28 @@ template V8_EXPORT_PRIVATE void ...@@ -361,15 +367,28 @@ template V8_EXPORT_PRIVATE void
MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(HeapObject object); MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(HeapObject object);
template V8_EXPORT_PRIVATE void template V8_EXPORT_PRIVATE void
MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(HeapObject object); MemoryChunk::RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(HeapObject object);
template V8_EXPORT_PRIVATE void MemoryChunk::RegisterObjectWithInvalidatedSlots<
OLD_TO_SHARED>(HeapObject object);
template <RememberedSetType type> template <RememberedSetType type>
void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject object) { void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject object) {
bool skip_slot_recording; bool skip_slot_recording;
if (type == OLD_TO_NEW) { switch (type) {
skip_slot_recording = InYoungGeneration(); case OLD_TO_NEW:
} else { skip_slot_recording = InYoungGeneration();
skip_slot_recording = ShouldSkipEvacuationSlotRecording(); break;
case OLD_TO_OLD:
skip_slot_recording = ShouldSkipEvacuationSlotRecording();
break;
case OLD_TO_SHARED:
skip_slot_recording = InYoungGeneration();
break;
default:
UNREACHABLE();
} }
if (skip_slot_recording) { if (skip_slot_recording) {
...@@ -391,8 +410,13 @@ void MemoryChunk::InvalidateRecordedSlots(HeapObject object) { ...@@ -391,8 +410,13 @@ void MemoryChunk::InvalidateRecordedSlots(HeapObject object) {
RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(object); RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(object);
} }
if (slot_set_[OLD_TO_NEW] != nullptr) if (slot_set_[OLD_TO_NEW] != nullptr) {
RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(object); RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(object);
}
if (slot_set_[OLD_TO_SHARED] != nullptr) {
RegisterObjectWithInvalidatedSlots<OLD_TO_SHARED>(object);
}
} }
template bool MemoryChunk::RegisteredObjectWithInvalidatedSlots<OLD_TO_NEW>( template bool MemoryChunk::RegisteredObjectWithInvalidatedSlots<OLD_TO_NEW>(
......
...@@ -254,7 +254,8 @@ V8_INLINE size_t Sweeper::FreeAndProcessFreedMemory( ...@@ -254,7 +254,8 @@ V8_INLINE size_t Sweeper::FreeAndProcessFreedMemory(
V8_INLINE void Sweeper::CleanupRememberedSetEntriesForFreedMemory( V8_INLINE void Sweeper::CleanupRememberedSetEntriesForFreedMemory(
Address free_start, Address free_end, Page* page, bool record_free_ranges, Address free_start, Address free_end, Page* page, bool record_free_ranges,
TypedSlotSet::FreeRangesMap* free_ranges_map, SweepingMode sweeping_mode, TypedSlotSet::FreeRangesMap* free_ranges_map, SweepingMode sweeping_mode,
InvalidatedSlotsCleanup* old_to_new_cleanup) { InvalidatedSlotsCleanup* invalidated_old_to_new_cleanup,
InvalidatedSlotsCleanup* invalidated_old_to_shared_cleanup) {
DCHECK_LE(free_start, free_end); DCHECK_LE(free_start, free_end);
if (sweeping_mode == SweepingMode::kEagerDuringGC) { if (sweeping_mode == SweepingMode::kEagerDuringGC) {
// New space and in consequence the old-to-new remembered set is always // New space and in consequence the old-to-new remembered set is always
...@@ -284,7 +285,8 @@ V8_INLINE void Sweeper::CleanupRememberedSetEntriesForFreedMemory( ...@@ -284,7 +285,8 @@ V8_INLINE void Sweeper::CleanupRememberedSetEntriesForFreedMemory(
static_cast<uint32_t>(free_end - page->address()))); static_cast<uint32_t>(free_end - page->address())));
} }
old_to_new_cleanup->Free(free_start, free_end); invalidated_old_to_new_cleanup->Free(free_start, free_end);
invalidated_old_to_shared_cleanup->Free(free_start, free_end);
} }
void Sweeper::CleanupInvalidTypedSlotsOfFreeRanges( void Sweeper::CleanupInvalidTypedSlotsOfFreeRanges(
...@@ -368,13 +370,18 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode, ...@@ -368,13 +370,18 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
p->typed_slot_set<OLD_TO_SHARED>() != nullptr || p->typed_slot_set<OLD_TO_SHARED>() != nullptr ||
DEBUG_BOOL; DEBUG_BOOL;
// Clean invalidated slots during the final atomic pause. After resuming // Clean invalidated slots in free memory during the final atomic pause. After
// execution this isn't necessary, invalid old-to-new refs were already // resuming execution this isn't necessary, invalid slots were already removed
// removed by mark compact's update pointers phase. // by mark compact's update pointers phase. So there are no invalid slots left
InvalidatedSlotsCleanup old_to_new_cleanup = // in free memory.
InvalidatedSlotsCleanup invalidated_old_to_new_cleanup =
InvalidatedSlotsCleanup::NoCleanup(p); InvalidatedSlotsCleanup::NoCleanup(p);
if (sweeping_mode == SweepingMode::kEagerDuringGC) InvalidatedSlotsCleanup invalidated_old_to_shared_cleanup =
old_to_new_cleanup = InvalidatedSlotsCleanup::OldToNew(p); InvalidatedSlotsCleanup::NoCleanup(p);
if (sweeping_mode == SweepingMode::kEagerDuringGC) {
invalidated_old_to_new_cleanup = InvalidatedSlotsCleanup::OldToNew(p);
invalidated_old_to_shared_cleanup = InvalidatedSlotsCleanup::OldToShared(p);
}
// The free ranges map is used for filtering typed slots. // The free ranges map is used for filtering typed slots.
TypedSlotSet::FreeRangesMap free_ranges_map; TypedSlotSet::FreeRangesMap free_ranges_map;
...@@ -401,7 +408,8 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode, ...@@ -401,7 +408,8 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
free_list_mode, free_space_mode)); free_list_mode, free_space_mode));
CleanupRememberedSetEntriesForFreedMemory( CleanupRememberedSetEntriesForFreedMemory(
free_start, free_end, p, record_free_ranges, &free_ranges_map, free_start, free_end, p, record_free_ranges, &free_ranges_map,
sweeping_mode, &old_to_new_cleanup); sweeping_mode, &invalidated_old_to_new_cleanup,
&invalidated_old_to_shared_cleanup);
} }
Map map = object.map(cage_base, kAcquireLoad); Map map = object.map(cage_base, kAcquireLoad);
DCHECK(MarkCompactCollector::IsMapOrForwarded(map)); DCHECK(MarkCompactCollector::IsMapOrForwarded(map));
...@@ -429,7 +437,8 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode, ...@@ -429,7 +437,8 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
free_list_mode, free_space_mode)); free_list_mode, free_space_mode));
CleanupRememberedSetEntriesForFreedMemory( CleanupRememberedSetEntriesForFreedMemory(
free_start, free_end, p, record_free_ranges, &free_ranges_map, free_start, free_end, p, record_free_ranges, &free_ranges_map,
sweeping_mode, &old_to_new_cleanup); sweeping_mode, &invalidated_old_to_new_cleanup,
&invalidated_old_to_shared_cleanup);
} }
// Phase 3: Post process the page. // Phase 3: Post process the page.
......
...@@ -144,7 +144,8 @@ class Sweeper { ...@@ -144,7 +144,8 @@ class Sweeper {
void CleanupRememberedSetEntriesForFreedMemory( void CleanupRememberedSetEntriesForFreedMemory(
Address free_start, Address free_end, Page* page, bool record_free_ranges, Address free_start, Address free_end, Page* page, bool record_free_ranges,
TypedSlotSet::FreeRangesMap* free_ranges_map, SweepingMode sweeping_mode, TypedSlotSet::FreeRangesMap* free_ranges_map, SweepingMode sweeping_mode,
InvalidatedSlotsCleanup* old_to_new_cleanup); InvalidatedSlotsCleanup* invalidated_old_to_new_cleanup,
InvalidatedSlotsCleanup* invalidated_old_to_shared_cleanup);
// Helper function for RawSweep. Clears invalid typed slots in the given free // Helper function for RawSweep. Clears invalid typed slots in the given free
// ranges. // ranges.
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment