Commit 167a8946 authored by Dominik Inführ, committed by Commit Bot

[heap] Split old-to-new remembered set

Split OLD_TO_NEW remembered set and add OLD_TO_NEW_SWEEPING. The
OLD_TO_NEW remembered set is moved to OLD_TO_NEW_SWEEPING during
mark-compact. OLD_TO_NEW_SWEEPING is then modified by the sweeper.
Before using the page again, OLD_TO_NEW and OLD_TO_NEW_SWEEPING are
merged again.

This means only the main thread modifies OLD_TO_NEW, the sweeper only
removes entries from OLD_TO_NEW_SWEEPING. We can use this property
to make accesses non-atomic in a subsequent CL.

Bug: v8:9454
Change-Id: I9057cf85818d647775ae4c7beec4c8ccf73e18f7
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1771783
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#63807}
parent 97c89ebb
...@@ -4115,6 +4115,17 @@ void CollectSlots(MemoryChunk* chunk, Address start, Address end, ...@@ -4115,6 +4115,17 @@ void CollectSlots(MemoryChunk* chunk, Address start, Address end,
return KEEP_SLOT; return KEEP_SLOT;
}, },
SlotSet::PREFREE_EMPTY_BUCKETS); SlotSet::PREFREE_EMPTY_BUCKETS);
if (direction == OLD_TO_NEW) {
RememberedSetSweeping::Iterate(
chunk,
[start, end, untyped](MaybeObjectSlot slot) {
if (start <= slot.address() && slot.address() < end) {
untyped->insert(slot.address());
}
return KEEP_SLOT;
},
SlotSet::PREFREE_EMPTY_BUCKETS);
}
RememberedSet<direction>::IterateTyped( RememberedSet<direction>::IterateTyped(
chunk, [=](SlotType type, Address slot) { chunk, [=](SlotType type, Address slot) {
if (start <= slot && slot < end) { if (start <= slot && slot < end) {
...@@ -5547,8 +5558,10 @@ void Heap::ClearRecordedSlot(HeapObject object, ObjectSlot slot) { ...@@ -5547,8 +5558,10 @@ void Heap::ClearRecordedSlot(HeapObject object, ObjectSlot slot) {
Page* page = Page::FromAddress(slot.address()); Page* page = Page::FromAddress(slot.address());
if (!page->InYoungGeneration()) { if (!page->InYoungGeneration()) {
DCHECK_EQ(page->owner_identity(), OLD_SPACE); DCHECK_EQ(page->owner_identity(), OLD_SPACE);
store_buffer()->MoveAllEntriesToRememberedSet(); store_buffer()->MoveAllEntriesToRememberedSet();
RememberedSet<OLD_TO_NEW>::Remove(page, slot.address()); RememberedSet<OLD_TO_NEW>::Remove(page, slot.address());
RememberedSetSweeping::Remove(page, slot.address());
} }
#endif #endif
} }
...@@ -5561,8 +5574,9 @@ void Heap::VerifyClearedSlot(HeapObject object, ObjectSlot slot) { ...@@ -5561,8 +5574,9 @@ void Heap::VerifyClearedSlot(HeapObject object, ObjectSlot slot) {
Page* page = Page::FromAddress(slot.address()); Page* page = Page::FromAddress(slot.address());
DCHECK_EQ(page->owner_identity(), OLD_SPACE); DCHECK_EQ(page->owner_identity(), OLD_SPACE);
store_buffer()->MoveAllEntriesToRememberedSet(); store_buffer()->MoveAllEntriesToRememberedSet();
CHECK(!RememberedSet<OLD_TO_NEW>::Contains(page, slot.address())); // Slots are filtered with invalidated slots.
// Old to old slots are filtered with invalidated slots. CHECK_IMPLIES(RememberedSet<OLD_TO_NEW>::Contains(page, slot.address()),
page->RegisteredObjectWithInvalidatedSlots<OLD_TO_NEW>(object));
CHECK_IMPLIES(RememberedSet<OLD_TO_OLD>::Contains(page, slot.address()), CHECK_IMPLIES(RememberedSet<OLD_TO_OLD>::Contains(page, slot.address()),
page->RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(object)); page->RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(object));
#endif #endif
...@@ -5575,9 +5589,12 @@ void Heap::ClearRecordedSlotRange(Address start, Address end) { ...@@ -5575,9 +5589,12 @@ void Heap::ClearRecordedSlotRange(Address start, Address end) {
DCHECK(!page->IsLargePage()); DCHECK(!page->IsLargePage());
if (!page->InYoungGeneration()) { if (!page->InYoungGeneration()) {
DCHECK_EQ(page->owner_identity(), OLD_SPACE); DCHECK_EQ(page->owner_identity(), OLD_SPACE);
store_buffer()->MoveAllEntriesToRememberedSet(); store_buffer()->MoveAllEntriesToRememberedSet();
RememberedSet<OLD_TO_NEW>::RemoveRange(page, start, end, RememberedSet<OLD_TO_NEW>::RemoveRange(page, start, end,
SlotSet::KEEP_EMPTY_BUCKETS); SlotSet::KEEP_EMPTY_BUCKETS);
RememberedSetSweeping::RemoveRange(page, start, end,
SlotSet::KEEP_EMPTY_BUCKETS);
} }
#endif #endif
} }
......
...@@ -855,7 +855,6 @@ class Heap { ...@@ -855,7 +855,6 @@ class Heap {
static Address store_buffer_overflow_function_address(); static Address store_buffer_overflow_function_address();
void MoveStoreBufferEntriesToRememberedSet(); void MoveStoreBufferEntriesToRememberedSet();
void ClearRecordedSlot(HeapObject object, ObjectSlot slot); void ClearRecordedSlot(HeapObject object, ObjectSlot slot);
void ClearRecordedSlotRange(Address start, Address end); void ClearRecordedSlotRange(Address start, Address end);
......
...@@ -3421,6 +3421,17 @@ class RememberedSetUpdatingItem : public UpdatingItem { ...@@ -3421,6 +3421,17 @@ class RememberedSetUpdatingItem : public UpdatingItem {
SlotSet::PREFREE_EMPTY_BUCKETS); SlotSet::PREFREE_EMPTY_BUCKETS);
} }
if (chunk_->sweeping_slot_set<AccessMode::NON_ATOMIC>()) {
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToNew(chunk_);
RememberedSetSweeping::Iterate(
chunk_,
[this, &filter](MaybeObjectSlot slot) {
CHECK(filter.IsValid(slot.address()));
return CheckAndUpdateOldToNewSlot(slot);
},
SlotSet::PREFREE_EMPTY_BUCKETS);
}
if (chunk_->invalidated_slots<OLD_TO_NEW>() != nullptr) { if (chunk_->invalidated_slots<OLD_TO_NEW>() != nullptr) {
// The invalidated slots are not needed after old-to-new slots were // The invalidated slots are not needed after old-to-new slots were
// processed. // processed.
...@@ -3437,6 +3448,7 @@ class RememberedSetUpdatingItem : public UpdatingItem { ...@@ -3437,6 +3448,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
return UpdateSlot<AccessMode::NON_ATOMIC>(slot); return UpdateSlot<AccessMode::NON_ATOMIC>(slot);
}, },
SlotSet::PREFREE_EMPTY_BUCKETS); SlotSet::PREFREE_EMPTY_BUCKETS);
chunk_->ReleaseSlotSet<OLD_TO_OLD>();
} }
if ((updating_mode_ == RememberedSetUpdatingMode::ALL) && if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
chunk_->invalidated_slots<OLD_TO_OLD>() != nullptr) { chunk_->invalidated_slots<OLD_TO_OLD>() != nullptr) {
...@@ -3556,15 +3568,18 @@ int MarkCompactCollectorBase::CollectRememberedSetUpdatingItems( ...@@ -3556,15 +3568,18 @@ int MarkCompactCollectorBase::CollectRememberedSetUpdatingItems(
const bool contains_old_to_new_slots = const bool contains_old_to_new_slots =
chunk->slot_set<OLD_TO_NEW>() != nullptr || chunk->slot_set<OLD_TO_NEW>() != nullptr ||
chunk->typed_slot_set<OLD_TO_NEW>() != nullptr; chunk->typed_slot_set<OLD_TO_NEW>() != nullptr;
const bool contains_old_to_new_sweeping_slots =
chunk->sweeping_slot_set() != nullptr;
const bool contains_old_to_old_invalidated_slots = const bool contains_old_to_old_invalidated_slots =
chunk->invalidated_slots<OLD_TO_OLD>() != nullptr; chunk->invalidated_slots<OLD_TO_OLD>() != nullptr;
const bool contains_old_to_new_invalidated_slots = const bool contains_old_to_new_invalidated_slots =
chunk->invalidated_slots<OLD_TO_NEW>() != nullptr; chunk->invalidated_slots<OLD_TO_NEW>() != nullptr;
if (!contains_old_to_new_slots && !contains_old_to_old_slots && if (!contains_old_to_new_slots && !contains_old_to_new_sweeping_slots &&
!contains_old_to_old_invalidated_slots && !contains_old_to_old_slots && !contains_old_to_old_invalidated_slots &&
!contains_old_to_new_invalidated_slots) !contains_old_to_new_invalidated_slots)
continue; continue;
if (mode == RememberedSetUpdatingMode::ALL || contains_old_to_new_slots || if (mode == RememberedSetUpdatingMode::ALL || contains_old_to_new_slots ||
contains_old_to_new_sweeping_slots ||
contains_old_to_old_invalidated_slots || contains_old_to_old_invalidated_slots ||
contains_old_to_new_invalidated_slots) { contains_old_to_new_invalidated_slots) {
job->AddItem(CreateRememberedSetUpdatingItem(chunk, mode)); job->AddItem(CreateRememberedSetUpdatingItem(chunk, mode));
...@@ -4651,6 +4666,14 @@ class PageMarkingItem : public MarkingItem { ...@@ -4651,6 +4666,14 @@ class PageMarkingItem : public MarkingItem {
return CheckAndMarkObject(task, slot); return CheckAndMarkObject(task, slot);
}, },
SlotSet::PREFREE_EMPTY_BUCKETS); SlotSet::PREFREE_EMPTY_BUCKETS);
filter = InvalidatedSlotsFilter::OldToNew(chunk_);
RememberedSetSweeping::Iterate(
chunk_,
[this, task, &filter](MaybeObjectSlot slot) {
if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
return CheckAndMarkObject(task, slot);
},
SlotSet::PREFREE_EMPTY_BUCKETS);
} }
void MarkTypedPointers(YoungGenerationMarkingTask* task) { void MarkTypedPointers(YoungGenerationMarkingTask* task) {
......
...@@ -18,54 +18,38 @@ namespace internal { ...@@ -18,54 +18,38 @@ namespace internal {
enum RememberedSetIterationMode { SYNCHRONIZED, NON_SYNCHRONIZED }; enum RememberedSetIterationMode { SYNCHRONIZED, NON_SYNCHRONIZED };
// TODO(ulan): Investigate performance of de-templatizing this class. class RememberedSetOperations {
template <RememberedSetType type>
class RememberedSet : public AllStatic {
public: public:
// Given a page and a slot in that page, this function adds the slot to the // Given a page and a slot in that page, this function adds the slot to the
// remembered set. // remembered set.
template <AccessMode access_mode = AccessMode::ATOMIC> template <AccessMode access_mode>
static void Insert(MemoryChunk* chunk, Address slot_addr) { static void Insert(SlotSet* slot_set, MemoryChunk* chunk, Address slot_addr) {
DCHECK(chunk->Contains(slot_addr)); DCHECK(chunk->Contains(slot_addr));
SlotSet* slot_set = chunk->slot_set<type, access_mode>();
if (slot_set == nullptr) {
slot_set = chunk->AllocateSlotSet<type>();
}
uintptr_t offset = slot_addr - chunk->address(); uintptr_t offset = slot_addr - chunk->address();
slot_set[offset / Page::kPageSize].Insert<access_mode>(offset % slot_set[offset / Page::kPageSize].Insert<access_mode>(offset %
Page::kPageSize); Page::kPageSize);
} }
// Given a page and a slot in that page, this function returns true if template <typename Callback>
// the remembered set contains the slot. static void Iterate(SlotSet* slots, MemoryChunk* chunk, Callback callback,
static bool Contains(MemoryChunk* chunk, Address slot_addr) { SlotSet::EmptyBucketMode mode) {
DCHECK(chunk->Contains(slot_addr)); if (slots != nullptr) {
SlotSet* slot_set = chunk->slot_set<type>(); size_t pages = (chunk->size() + Page::kPageSize - 1) / Page::kPageSize;
if (slot_set == nullptr) { for (size_t page = 0; page < pages; page++) {
return false; slots[page].Iterate(callback, mode);
}
} }
uintptr_t offset = slot_addr - chunk->address();
return slot_set[offset / Page::kPageSize].Contains(offset %
Page::kPageSize);
} }
// Given a page and a slot in that page, this function removes the slot from static void Remove(SlotSet* slot_set, MemoryChunk* chunk, Address slot_addr) {
// the remembered set.
// If the slot was never added, then the function does nothing.
static void Remove(MemoryChunk* chunk, Address slot_addr) {
DCHECK(chunk->Contains(slot_addr));
SlotSet* slot_set = chunk->slot_set<type>();
if (slot_set != nullptr) { if (slot_set != nullptr) {
uintptr_t offset = slot_addr - chunk->address(); uintptr_t offset = slot_addr - chunk->address();
slot_set[offset / Page::kPageSize].Remove(offset % Page::kPageSize); slot_set[offset / Page::kPageSize].Remove(offset % Page::kPageSize);
} }
} }
// Given a page and a range of slots in that page, this function removes the static void RemoveRange(SlotSet* slot_set, MemoryChunk* chunk, Address start,
// slots from the remembered set. Address end, SlotSet::EmptyBucketMode mode) {
static void RemoveRange(MemoryChunk* chunk, Address start, Address end,
SlotSet::EmptyBucketMode mode) {
SlotSet* slot_set = chunk->slot_set<type>();
if (slot_set != nullptr) { if (slot_set != nullptr) {
uintptr_t start_offset = start - chunk->address(); uintptr_t start_offset = start - chunk->address();
uintptr_t end_offset = end - chunk->address(); uintptr_t end_offset = end - chunk->address();
...@@ -101,6 +85,53 @@ class RememberedSet : public AllStatic { ...@@ -101,6 +85,53 @@ class RememberedSet : public AllStatic {
} }
} }
} }
};
// TODO(ulan): Investigate performance of de-templatizing this class.
template <RememberedSetType type>
class RememberedSet : public AllStatic {
public:
// Given a page and a slot in that page, this function adds the slot to the
// remembered set.
template <AccessMode access_mode = AccessMode::ATOMIC>
static void Insert(MemoryChunk* chunk, Address slot_addr) {
DCHECK(chunk->Contains(slot_addr));
SlotSet* slot_set = chunk->slot_set<type, access_mode>();
if (slot_set == nullptr) {
slot_set = chunk->AllocateSlotSet<type>();
}
RememberedSetOperations::Insert<access_mode>(slot_set, chunk, slot_addr);
}
// Given a page and a slot in that page, this function returns true if
// the remembered set contains the slot.
static bool Contains(MemoryChunk* chunk, Address slot_addr) {
DCHECK(chunk->Contains(slot_addr));
SlotSet* slot_set = chunk->slot_set<type>();
if (slot_set == nullptr) {
return false;
}
uintptr_t offset = slot_addr - chunk->address();
return slot_set[offset / Page::kPageSize].Contains(offset %
Page::kPageSize);
}
// Given a page and a slot in that page, this function removes the slot from
// the remembered set.
// If the slot was never added, then the function does nothing.
static void Remove(MemoryChunk* chunk, Address slot_addr) {
DCHECK(chunk->Contains(slot_addr));
SlotSet* slot_set = chunk->slot_set<type>();
RememberedSetOperations::Remove(slot_set, chunk, slot_addr);
}
// Given a page and a range of slots in that page, this function removes the
// slots from the remembered set.
static void RemoveRange(MemoryChunk* chunk, Address start, Address end,
SlotSet::EmptyBucketMode mode) {
SlotSet* slot_set = chunk->slot_set<type>();
RememberedSetOperations::RemoveRange(slot_set, chunk, start, end, mode);
}
// Iterates and filters the remembered set with the given callback. // Iterates and filters the remembered set with the given callback.
// The callback should take (Address slot) and return SlotCallbackResult. // The callback should take (Address slot) and return SlotCallbackResult.
...@@ -122,8 +153,11 @@ class RememberedSet : public AllStatic { ...@@ -122,8 +153,11 @@ class RememberedSet : public AllStatic {
MemoryChunk* chunk; MemoryChunk* chunk;
while ((chunk = it.next()) != nullptr) { while ((chunk = it.next()) != nullptr) {
SlotSet* slots = chunk->slot_set<type>(); SlotSet* slots = chunk->slot_set<type>();
SlotSet* sweeping_slots =
type == OLD_TO_NEW ? chunk->sweeping_slot_set() : nullptr;
TypedSlotSet* typed_slots = chunk->typed_slot_set<type>(); TypedSlotSet* typed_slots = chunk->typed_slot_set<type>();
if (slots != nullptr || typed_slots != nullptr || if (slots != nullptr || sweeping_slots != nullptr ||
typed_slots != nullptr ||
chunk->invalidated_slots<type>() != nullptr) { chunk->invalidated_slots<type>() != nullptr) {
callback(chunk); callback(chunk);
} }
...@@ -140,18 +174,7 @@ class RememberedSet : public AllStatic { ...@@ -140,18 +174,7 @@ class RememberedSet : public AllStatic {
static void Iterate(MemoryChunk* chunk, Callback callback, static void Iterate(MemoryChunk* chunk, Callback callback,
SlotSet::EmptyBucketMode mode) { SlotSet::EmptyBucketMode mode) {
SlotSet* slots = chunk->slot_set<type>(); SlotSet* slots = chunk->slot_set<type>();
if (slots != nullptr) { RememberedSetOperations::Iterate(slots, chunk, callback, mode);
size_t pages = (chunk->size() + Page::kPageSize - 1) / Page::kPageSize;
int new_count = 0;
for (size_t page = 0; page < pages; page++) {
new_count += slots[page].Iterate(callback, mode);
}
// Only old-to-old slot sets are released eagerly. Old-new-slot sets are
// released by the sweeper threads.
if (type == OLD_TO_OLD && new_count == 0) {
chunk->ReleaseSlotSet<OLD_TO_OLD>();
}
}
} }
static int NumberOfPreFreedEmptyBuckets(MemoryChunk* chunk) { static int NumberOfPreFreedEmptyBuckets(MemoryChunk* chunk) {
...@@ -349,6 +372,46 @@ class UpdateTypedSlotHelper { ...@@ -349,6 +372,46 @@ class UpdateTypedSlotHelper {
} }
}; };
class RememberedSetSweeping {
public:
template <AccessMode access_mode = AccessMode::ATOMIC>
static void Insert(MemoryChunk* chunk, Address slot_addr) {
DCHECK(chunk->Contains(slot_addr));
SlotSet* slot_set = chunk->sweeping_slot_set<access_mode>();
if (slot_set == nullptr) {
slot_set = chunk->AllocateSweepingSlotSet();
}
RememberedSetOperations::Insert<access_mode>(slot_set, chunk, slot_addr);
}
static void Remove(MemoryChunk* chunk, Address slot_addr) {
DCHECK(chunk->Contains(slot_addr));
SlotSet* slot_set = chunk->sweeping_slot_set<AccessMode::ATOMIC>();
RememberedSetOperations::Remove(slot_set, chunk, slot_addr);
}
// Given a page and a range of slots in that page, this function removes the
// slots from the remembered set.
static void RemoveRange(MemoryChunk* chunk, Address start, Address end,
SlotSet::EmptyBucketMode mode) {
SlotSet* slot_set = chunk->sweeping_slot_set();
RememberedSetOperations::RemoveRange(slot_set, chunk, start, end, mode);
}
// Iterates and filters the remembered set in the given memory chunk with
// the given callback. The callback should take (Address slot) and return
// SlotCallbackResult.
//
// Notice that |mode| can only be of FREE* or PREFREE* if there are no other
// threads concurrently inserting slots.
template <typename Callback>
static void Iterate(MemoryChunk* chunk, Callback callback,
SlotSet::EmptyBucketMode mode) {
SlotSet* slots = chunk->sweeping_slot_set();
RememberedSetOperations::Iterate(slots, chunk, callback, mode);
}
};
inline SlotType SlotTypeForRelocInfoMode(RelocInfo::Mode rmode) { inline SlotType SlotTypeForRelocInfoMode(RelocInfo::Mode rmode) {
if (RelocInfo::IsCodeTargetMode(rmode)) { if (RelocInfo::IsCodeTargetMode(rmode)) {
return CODE_TARGET_SLOT; return CODE_TARGET_SLOT;
......
...@@ -153,8 +153,15 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor { ...@@ -153,8 +153,15 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
if (result == KEEP_SLOT) { if (result == KEEP_SLOT) {
SLOW_DCHECK(target.IsHeapObject()); SLOW_DCHECK(target.IsHeapObject());
RememberedSet<OLD_TO_NEW>::Insert(MemoryChunk::FromHeapObject(host), MemoryChunk* chunk = MemoryChunk::FromHeapObject(host);
slot.address());
// Sweeper is stopped during scavenge, so we can directly
// insert into its remembered set here.
if (chunk->sweeping_slot_set()) {
RememberedSetSweeping::Insert(chunk, slot.address());
} else {
RememberedSet<OLD_TO_NEW>::Insert(chunk, slot.address());
}
} }
SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate( SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(
HeapObject::cast(target))); HeapObject::cast(target)));
...@@ -239,8 +246,10 @@ void ScavengerCollector::CollectGarbage() { ...@@ -239,8 +246,10 @@ void ScavengerCollector::CollectGarbage() {
// access to the slots of a page and can completely avoid any locks on // access to the slots of a page and can completely avoid any locks on
// the page itself. // the page itself.
Sweeper::FilterSweepingPagesScope filter_scope(sweeper, pause_scope); Sweeper::FilterSweepingPagesScope filter_scope(sweeper, pause_scope);
filter_scope.FilterOldSpaceSweepingPages( filter_scope.FilterOldSpaceSweepingPages([](Page* page) {
[](Page* page) { return !page->ContainsSlots<OLD_TO_NEW>(); }); return !page->ContainsSlots<OLD_TO_NEW>() && !page->sweeping_slot_set();
});
RememberedSet<OLD_TO_NEW>::IterateMemoryChunks( RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(
heap_, [&job](MemoryChunk* chunk) { heap_, [&job](MemoryChunk* chunk) {
job.AddItem(new PageScavengingItem(chunk)); job.AddItem(new PageScavengingItem(chunk));
...@@ -440,6 +449,14 @@ void Scavenger::ScavengePage(MemoryChunk* page) { ...@@ -440,6 +449,14 @@ void Scavenger::ScavengePage(MemoryChunk* page) {
return CheckAndScavengeObject(heap_, slot); return CheckAndScavengeObject(heap_, slot);
}, },
SlotSet::KEEP_EMPTY_BUCKETS); SlotSet::KEEP_EMPTY_BUCKETS);
filter = InvalidatedSlotsFilter::OldToNew(page);
RememberedSetSweeping::Iterate(
page,
[this, &filter](MaybeObjectSlot slot) {
CHECK(filter.IsValid(slot.address()));
return CheckAndScavengeObject(heap_, slot);
},
SlotSet::KEEP_EMPTY_BUCKETS);
if (page->invalidated_slots<OLD_TO_NEW>() != nullptr) { if (page->invalidated_slots<OLD_TO_NEW>() != nullptr) {
// The invalidated slots are not needed after old-to-new slots were // The invalidated slots are not needed after old-to-new slots were
......
...@@ -18,6 +18,7 @@ ...@@ -18,6 +18,7 @@
#include "src/heap/gc-tracer.h" #include "src/heap/gc-tracer.h"
#include "src/heap/heap-controller.h" #include "src/heap/heap-controller.h"
#include "src/heap/incremental-marking-inl.h" #include "src/heap/incremental-marking-inl.h"
#include "src/heap/invalidated-slots-inl.h"
#include "src/heap/mark-compact.h" #include "src/heap/mark-compact.h"
#include "src/heap/read-only-heap.h" #include "src/heap/read-only-heap.h"
#include "src/heap/remembered-set.h" #include "src/heap/remembered-set.h"
...@@ -698,6 +699,7 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size, ...@@ -698,6 +699,7 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
chunk->InitializeReservedMemory(); chunk->InitializeReservedMemory();
base::AsAtomicPointer::Release_Store(&chunk->slot_set_[OLD_TO_NEW], nullptr); base::AsAtomicPointer::Release_Store(&chunk->slot_set_[OLD_TO_NEW], nullptr);
base::AsAtomicPointer::Release_Store(&chunk->slot_set_[OLD_TO_OLD], nullptr); base::AsAtomicPointer::Release_Store(&chunk->slot_set_[OLD_TO_OLD], nullptr);
base::AsAtomicPointer::Release_Store(&chunk->sweeping_slot_set_, nullptr);
base::AsAtomicPointer::Release_Store(&chunk->typed_slot_set_[OLD_TO_NEW], base::AsAtomicPointer::Release_Store(&chunk->typed_slot_set_[OLD_TO_NEW],
nullptr); nullptr);
base::AsAtomicPointer::Release_Store(&chunk->typed_slot_set_[OLD_TO_OLD], base::AsAtomicPointer::Release_Store(&chunk->typed_slot_set_[OLD_TO_OLD],
...@@ -855,6 +857,33 @@ Page* Page::ConvertNewToOld(Page* old_page) { ...@@ -855,6 +857,33 @@ Page* Page::ConvertNewToOld(Page* old_page) {
return new_page; return new_page;
} }
void Page::MoveOldToNewRememberedSetForSweeping() {
CHECK_NULL(sweeping_slot_set_);
sweeping_slot_set_ = slot_set_[OLD_TO_NEW];
slot_set_[OLD_TO_NEW] = nullptr;
}
// Merges the OLD_TO_NEW slot set (filled by the main thread) back into the
// sweeping slot set (owned by the sweeper), then reinstates the combined set
// as the page's single OLD_TO_NEW set. Must not race with sweeper iteration
// of the sweeping set or with concurrent OLD_TO_NEW insertion.
void Page::MergeOldToNewRememberedSets() {
  // Nothing was handed to the sweeper, so there is nothing to merge.
  if (sweeping_slot_set_ == nullptr) return;

  // Copy every slot recorded in OLD_TO_NEW into the sweeping set; the
  // OLD_TO_NEW set itself is left untouched (KEEP_SLOT / KEEP_EMPTY_BUCKETS)
  // and released wholesale below.
  RememberedSet<OLD_TO_NEW>::Iterate(
      this,
      [this](MaybeObjectSlot slot) {
        Address address = slot.address();
        RememberedSetSweeping::Insert(this, address);
        return KEEP_SLOT;
      },
      SlotSet::KEEP_EMPTY_BUCKETS);

  if (slot_set_[OLD_TO_NEW]) {
    ReleaseSlotSet<OLD_TO_NEW>();
  }

  // Promote the (now complete) sweeping set to be the page's OLD_TO_NEW set.
  CHECK_NULL(slot_set_[OLD_TO_NEW]);
  slot_set_[OLD_TO_NEW] = sweeping_slot_set_;
  sweeping_slot_set_ = nullptr;
}
size_t MemoryChunk::CommittedPhysicalMemory() { size_t MemoryChunk::CommittedPhysicalMemory() {
if (!base::OS::HasLazyCommits() || owner_identity() == LO_SPACE) if (!base::OS::HasLazyCommits() || owner_identity() == LO_SPACE)
return size(); return size();
...@@ -1375,6 +1404,7 @@ void MemoryChunk::ReleaseAllocatedMemoryNeededForWritableChunk() { ...@@ -1375,6 +1404,7 @@ void MemoryChunk::ReleaseAllocatedMemoryNeededForWritableChunk() {
} }
ReleaseSlotSet<OLD_TO_NEW>(); ReleaseSlotSet<OLD_TO_NEW>();
ReleaseSlotSet(&sweeping_slot_set_);
ReleaseSlotSet<OLD_TO_OLD>(); ReleaseSlotSet<OLD_TO_OLD>();
ReleaseTypedSlotSet<OLD_TO_NEW>(); ReleaseTypedSlotSet<OLD_TO_NEW>();
ReleaseTypedSlotSet<OLD_TO_OLD>(); ReleaseTypedSlotSet<OLD_TO_OLD>();
...@@ -1410,15 +1440,23 @@ template V8_EXPORT_PRIVATE SlotSet* MemoryChunk::AllocateSlotSet<OLD_TO_OLD>(); ...@@ -1410,15 +1440,23 @@ template V8_EXPORT_PRIVATE SlotSet* MemoryChunk::AllocateSlotSet<OLD_TO_OLD>();
template <RememberedSetType type> template <RememberedSetType type>
SlotSet* MemoryChunk::AllocateSlotSet() { SlotSet* MemoryChunk::AllocateSlotSet() {
SlotSet* slot_set = AllocateAndInitializeSlotSet(size(), address()); return AllocateSlotSet(&slot_set_[type]);
}
SlotSet* MemoryChunk::AllocateSweepingSlotSet() {
return AllocateSlotSet(&sweeping_slot_set_);
}
SlotSet* MemoryChunk::AllocateSlotSet(SlotSet** slot_set) {
SlotSet* new_slot_set = AllocateAndInitializeSlotSet(size(), address());
SlotSet* old_slot_set = base::AsAtomicPointer::Release_CompareAndSwap( SlotSet* old_slot_set = base::AsAtomicPointer::Release_CompareAndSwap(
&slot_set_[type], nullptr, slot_set); slot_set, nullptr, new_slot_set);
if (old_slot_set != nullptr) { if (old_slot_set != nullptr) {
delete[] slot_set; delete[] new_slot_set;
slot_set = old_slot_set; new_slot_set = old_slot_set;
} }
DCHECK(slot_set); DCHECK(new_slot_set);
return slot_set; return new_slot_set;
} }
template void MemoryChunk::ReleaseSlotSet<OLD_TO_NEW>(); template void MemoryChunk::ReleaseSlotSet<OLD_TO_NEW>();
...@@ -1426,10 +1464,13 @@ template void MemoryChunk::ReleaseSlotSet<OLD_TO_OLD>(); ...@@ -1426,10 +1464,13 @@ template void MemoryChunk::ReleaseSlotSet<OLD_TO_OLD>();
template <RememberedSetType type> template <RememberedSetType type>
void MemoryChunk::ReleaseSlotSet() { void MemoryChunk::ReleaseSlotSet() {
SlotSet* slot_set = slot_set_[type]; ReleaseSlotSet(&slot_set_[type]);
if (slot_set) { }
slot_set_[type] = nullptr;
delete[] slot_set; void MemoryChunk::ReleaseSlotSet(SlotSet** slot_set) {
if (*slot_set) {
delete[] * slot_set;
*slot_set = nullptr;
} }
} }
...@@ -1626,6 +1667,13 @@ void PagedSpace::RefillFreeList() { ...@@ -1626,6 +1667,13 @@ void PagedSpace::RefillFreeList() {
DCHECK(!IsDetached()); DCHECK(!IsDetached());
MarkCompactCollector* collector = heap()->mark_compact_collector(); MarkCompactCollector* collector = heap()->mark_compact_collector();
size_t added = 0; size_t added = 0;
// Avoid races with concurrent store buffer processing when merging
// old-to-new remembered sets later.
if (!is_local()) {
heap()->MoveStoreBufferEntriesToRememberedSet();
}
{ {
Page* p = nullptr; Page* p = nullptr;
while ((p = collector->sweeper()->GetSweptPageSafe(this)) != nullptr) { while ((p = collector->sweeper()->GetSweptPageSafe(this)) != nullptr) {
...@@ -1636,6 +1684,15 @@ void PagedSpace::RefillFreeList() { ...@@ -1636,6 +1684,15 @@ void PagedSpace::RefillFreeList() {
category->Reset(free_list()); category->Reset(free_list());
}); });
} }
// Also merge old-to-new remembered sets outside of collections.
// Do not do this during GC, because of races during scavenges.
// One thread might iterate remembered set, while another thread merges
// them.
if (!is_local()) {
p->MergeOldToNewRememberedSets();
}
// Only during compaction pages can actually change ownership. This is // Only during compaction pages can actually change ownership. This is
// safe because there exists no other competing action on the page links // safe because there exists no other competing action on the page links
// during compaction. // during compaction.
...@@ -1678,6 +1735,9 @@ void PagedSpace::MergeCompactionSpace(CompactionSpace* other) { ...@@ -1678,6 +1735,9 @@ void PagedSpace::MergeCompactionSpace(CompactionSpace* other) {
// Move over pages. // Move over pages.
for (auto it = other->begin(); it != other->end();) { for (auto it = other->begin(); it != other->end();) {
Page* p = *(it++); Page* p = *(it++);
p->MergeOldToNewRememberedSets();
// Relinking requires the category to be unlinked. // Relinking requires the category to be unlinked.
other->RemovePage(p); other->RemovePage(p);
AddPage(p); AddPage(p);
......
...@@ -133,7 +133,7 @@ enum class SpaceAccountingMode { kSpaceAccounted, kSpaceUnaccounted }; ...@@ -133,7 +133,7 @@ enum class SpaceAccountingMode { kSpaceAccounted, kSpaceUnaccounted };
enum RememberedSetType { enum RememberedSetType {
OLD_TO_NEW, OLD_TO_NEW,
OLD_TO_OLD, OLD_TO_OLD,
NUMBER_OF_REMEMBERED_SET_TYPES = OLD_TO_OLD + 1 NUMBER_OF_REMEMBERED_SET_TYPES
}; };
// A free list category maintains a linked list of free memory blocks. // A free list category maintains a linked list of free memory blocks.
...@@ -607,6 +607,7 @@ class MemoryChunk : public BasicMemoryChunk { ...@@ -607,6 +607,7 @@ class MemoryChunk : public BasicMemoryChunk {
+ kSizetSize // size_t progress_bar_ + kSizetSize // size_t progress_bar_
+ kIntptrSize // intptr_t live_byte_count_ + kIntptrSize // intptr_t live_byte_count_
+ kSystemPointerSize * NUMBER_OF_REMEMBERED_SET_TYPES // SlotSet* array + kSystemPointerSize * NUMBER_OF_REMEMBERED_SET_TYPES // SlotSet* array
+ kSystemPointerSize // SlotSet* sweeping_slot_set_
+ kSystemPointerSize * + kSystemPointerSize *
NUMBER_OF_REMEMBERED_SET_TYPES // TypedSlotSet* array NUMBER_OF_REMEMBERED_SET_TYPES // TypedSlotSet* array
+ kSystemPointerSize * + kSystemPointerSize *
...@@ -706,6 +707,13 @@ class MemoryChunk : public BasicMemoryChunk { ...@@ -706,6 +707,13 @@ class MemoryChunk : public BasicMemoryChunk {
return slot_set_[type]; return slot_set_[type];
} }
// Returns the slot set currently owned by the sweeper, or nullptr if none.
// With AccessMode::ATOMIC the pointer is loaded with acquire semantics so a
// concurrent allocation of the set is observed consistently.
template <AccessMode access_mode = AccessMode::ATOMIC>
SlotSet* sweeping_slot_set() {
  if (access_mode == AccessMode::ATOMIC) {
    return base::AsAtomicPointer::Acquire_Load(&sweeping_slot_set_);
  }
  return sweeping_slot_set_;
}
template <RememberedSetType type, AccessMode access_mode = AccessMode::ATOMIC> template <RememberedSetType type, AccessMode access_mode = AccessMode::ATOMIC>
TypedSlotSet* typed_slot_set() { TypedSlotSet* typed_slot_set() {
if (access_mode == AccessMode::ATOMIC) if (access_mode == AccessMode::ATOMIC)
...@@ -715,9 +723,13 @@ class MemoryChunk : public BasicMemoryChunk { ...@@ -715,9 +723,13 @@ class MemoryChunk : public BasicMemoryChunk {
template <RememberedSetType type> template <RememberedSetType type>
V8_EXPORT_PRIVATE SlotSet* AllocateSlotSet(); V8_EXPORT_PRIVATE SlotSet* AllocateSlotSet();
SlotSet* AllocateSweepingSlotSet();
SlotSet* AllocateSlotSet(SlotSet** slot_set);
// Not safe to be called concurrently. // Not safe to be called concurrently.
template <RememberedSetType type> template <RememberedSetType type>
void ReleaseSlotSet(); void ReleaseSlotSet();
void ReleaseSlotSet(SlotSet** slot_set);
template <RememberedSetType type> template <RememberedSetType type>
TypedSlotSet* AllocateTypedSlotSet(); TypedSlotSet* AllocateTypedSlotSet();
// Not safe to be called concurrently. // Not safe to be called concurrently.
...@@ -911,6 +923,7 @@ class MemoryChunk : public BasicMemoryChunk { ...@@ -911,6 +923,7 @@ class MemoryChunk : public BasicMemoryChunk {
// set for large pages. In the latter case the number of entries in the array // set for large pages. In the latter case the number of entries in the array
// is ceil(size() / kPageSize). // is ceil(size() / kPageSize).
SlotSet* slot_set_[NUMBER_OF_REMEMBERED_SET_TYPES]; SlotSet* slot_set_[NUMBER_OF_REMEMBERED_SET_TYPES];
SlotSet* sweeping_slot_set_;
TypedSlotSet* typed_slot_set_[NUMBER_OF_REMEMBERED_SET_TYPES]; TypedSlotSet* typed_slot_set_[NUMBER_OF_REMEMBERED_SET_TYPES];
InvalidatedSlots* invalidated_slots_[NUMBER_OF_REMEMBERED_SET_TYPES]; InvalidatedSlots* invalidated_slots_[NUMBER_OF_REMEMBERED_SET_TYPES];
...@@ -1093,6 +1106,9 @@ class Page : public MemoryChunk { ...@@ -1093,6 +1106,9 @@ class Page : public MemoryChunk {
void AllocateFreeListCategories(); void AllocateFreeListCategories();
void ReleaseFreeListCategories(); void ReleaseFreeListCategories();
void MoveOldToNewRememberedSetForSweeping();
void MergeOldToNewRememberedSets();
#ifdef DEBUG #ifdef DEBUG
void Print(); void Print();
#endif // DEBUG #endif // DEBUG
......
...@@ -320,8 +320,8 @@ int Sweeper::RawSweep( ...@@ -320,8 +320,8 @@ int Sweeper::RawSweep(
ClearFreedMemoryMode::kClearFreedMemory); ClearFreedMemoryMode::kClearFreedMemory);
} }
if (should_reduce_memory_) p->DiscardUnusedMemory(free_start, size); if (should_reduce_memory_) p->DiscardUnusedMemory(free_start, size);
RememberedSet<OLD_TO_NEW>::RemoveRange(p, free_start, free_end, RememberedSetSweeping::RemoveRange(p, free_start, free_end,
SlotSet::KEEP_EMPTY_BUCKETS); SlotSet::KEEP_EMPTY_BUCKETS);
RememberedSet<OLD_TO_OLD>::RemoveRange(p, free_start, free_end, RememberedSet<OLD_TO_OLD>::RemoveRange(p, free_start, free_end,
SlotSet::KEEP_EMPTY_BUCKETS); SlotSet::KEEP_EMPTY_BUCKETS);
if (non_empty_typed_slots) { if (non_empty_typed_slots) {
...@@ -354,8 +354,8 @@ int Sweeper::RawSweep( ...@@ -354,8 +354,8 @@ int Sweeper::RawSweep(
ClearFreedMemoryMode::kClearFreedMemory); ClearFreedMemoryMode::kClearFreedMemory);
} }
if (should_reduce_memory_) p->DiscardUnusedMemory(free_start, size); if (should_reduce_memory_) p->DiscardUnusedMemory(free_start, size);
RememberedSet<OLD_TO_NEW>::RemoveRange(p, free_start, p->area_end(), RememberedSetSweeping::RemoveRange(p, free_start, p->area_end(),
SlotSet::KEEP_EMPTY_BUCKETS); SlotSet::KEEP_EMPTY_BUCKETS);
RememberedSet<OLD_TO_OLD>::RemoveRange(p, free_start, p->area_end(), RememberedSet<OLD_TO_OLD>::RemoveRange(p, free_start, p->area_end(),
SlotSet::KEEP_EMPTY_BUCKETS); SlotSet::KEEP_EMPTY_BUCKETS);
if (non_empty_typed_slots) { if (non_empty_typed_slots) {
...@@ -516,6 +516,7 @@ void Sweeper::PrepareToBeSweptPage(AllocationSpace space, Page* page) { ...@@ -516,6 +516,7 @@ void Sweeper::PrepareToBeSweptPage(AllocationSpace space, Page* page) {
DCHECK(!category->is_linked(page->owner()->free_list())); DCHECK(!category->is_linked(page->owner()->free_list()));
}); });
#endif // DEBUG #endif // DEBUG
page->MoveOldToNewRememberedSetForSweeping();
page->set_concurrent_sweeping_state(Page::kSweepingPending); page->set_concurrent_sweeping_state(Page::kSweepingPending);
heap_->paged_space(space)->IncreaseAllocatedBytes( heap_->paged_space(space)->IncreaseAllocatedBytes(
marking_state_->live_bytes(page), page); marking_state_->live_bytes(page), page);
......
...@@ -2795,7 +2795,6 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object, ...@@ -2795,7 +2795,6 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
index, HeapNumber::cast(value).value_as_bits()); index, HeapNumber::cast(value).value_as_bits());
if (i < old_number_of_fields && !old_map->IsUnboxedDoubleField(index)) { if (i < old_number_of_fields && !old_map->IsUnboxedDoubleField(index)) {
// Transition from tagged to untagged slot. // Transition from tagged to untagged slot.
heap->ClearRecordedSlot(*object, object->RawField(index.offset()));
MemoryChunk* chunk = MemoryChunk::FromHeapObject(*object); MemoryChunk* chunk = MemoryChunk::FromHeapObject(*object);
chunk->InvalidateRecordedSlots(*object); chunk->InvalidateRecordedSlots(*object);
} else { } else {
......
...@@ -153,6 +153,9 @@ bool DeleteObjectPropertyFast(Isolate* isolate, Handle<JSReceiver> receiver, ...@@ -153,6 +153,9 @@ bool DeleteObjectPropertyFast(Isolate* isolate, Handle<JSReceiver> receiver,
// Slot clearing is the reason why this entire function cannot currently // Slot clearing is the reason why this entire function cannot currently
// be implemented in the DeleteProperty stub. // be implemented in the DeleteProperty stub.
if (index.is_inobject() && !receiver_map->IsUnboxedDoubleField(index)) { if (index.is_inobject() && !receiver_map->IsUnboxedDoubleField(index)) {
// We need to clear the recorded slot in this case because in-object
// slack tracking might not be finished. This ensures that we don't
// have recorded slots in free space.
isolate->heap()->ClearRecordedSlot(*receiver, isolate->heap()->ClearRecordedSlot(*receiver,
receiver->RawField(index.offset())); receiver->RawField(index.offset()));
MemoryChunk* chunk = MemoryChunk::FromHeapObject(*receiver); MemoryChunk* chunk = MemoryChunk::FromHeapObject(*receiver);
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment