Commit 9146bc5e authored by ulan, committed by Commit bot

Revert of Replace slots buffer with remembered set. (patchset #14 id:250001 of https://codereview.chromium.org/1703823002/ )

Reason for revert:
Revert because of canary crashes: crbug.com/589413

Original issue's description:
> Replace slots buffer with remembered set.
>
> Slots pointing to evacuation candidates are now recorded in the new RememberedSet<OLD_TO_OLD>.
>
> The remembered set is extended to support typed slots.
>
> During parallel evacuation all migration slots are recorded in local slots buffers.
> After evacuation all local slots are added to the remembered set.
>
> BUG=chromium:578883
> LOG=NO
>
> Committed: https://crrev.com/2285a99ef6f7d52f4f0c4d88a7db4224443ee152
> Cr-Commit-Position: refs/heads/master@{#34212}
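
As a rough illustration of the design described above, here is a standalone sketch of how a per-page remembered set records plain and typed old-to-old slots, and how per-task local buffers defer publication until after parallel evacuation. All types here are simplified stand-ins that only mirror the names used in this CL (RememberedSet<OLD_TO_OLD>::Insert/InsertTyped, LocalSlotsBuffer); this is not the real implementation.

// Simplified model of the reverted design. Hypothetical types throughout.
#include <cstdint>
#include <set>
#include <utility>
#include <vector>

using Address = uintptr_t;

enum SlotType { CODE_TARGET_SLOT, EMBEDDED_OBJECT_SLOT, OBJECT_SLOT };

// Per-page remembered set: untyped slots are bare addresses; typed slots
// (code targets, embedded objects, ...) carry a tag describing how to
// update them later.
struct PageRememberedSet {
  std::set<Address> slots;
  std::vector<std::pair<SlotType, Address>> typed_slots;

  void Insert(Address slot) { slots.insert(slot); }
  void InsertTyped(SlotType type, Address slot) {
    typed_slots.emplace_back(type, slot);
  }
};

// During parallel evacuation each task records migration slots locally,
// without synchronization; the local buffer is merged into the global
// per-page sets once evacuation has finished.
struct LocalSlotsBuffer {
  std::vector<Address> recorded;

  void Record(Address slot) { recorded.push_back(slot); }
  void MergeInto(PageRememberedSet* remembered_set) {
    for (Address a : recorded) remembered_set->Insert(a);
  }
};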

TBR=jochen@chromium.org,hpayer@chromium.org,mlippautz@chromium.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=chromium:578883

Review URL: https://codereview.chromium.org/1725073003

Cr-Commit-Position: refs/heads/master@{#34238}
parent 666aec03
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -1072,6 +1072,8 @@ source_set("v8_base") {
     "src/heap/scavenger.cc",
     "src/heap/scavenger.h",
     "src/heap/slot-set.h",
+    "src/heap/slots-buffer.cc",
+    "src/heap/slots-buffer.h",
     "src/heap/spaces-inl.h",
     "src/heap/spaces.cc",
     "src/heap/spaces.h",
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -5578,7 +5578,6 @@ void Heap::ClearRecordedSlot(HeapObject* object, Object** slot) {
     Page* page = Page::FromAddress(slot_addr);
     DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
     RememberedSet<OLD_TO_NEW>::Remove(page, slot_addr);
-    RememberedSet<OLD_TO_OLD>::Remove(page, slot_addr);
   }
 }
@@ -5591,7 +5590,6 @@ void Heap::ClearRecordedSlotRange(HeapObject* object, Object** start,
     Page* page = Page::FromAddress(start_addr);
     DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
     RememberedSet<OLD_TO_NEW>::RemoveRange(page, start_addr, end_addr);
-    RememberedSet<OLD_TO_OLD>::RemoveRange(page, start_addr, end_addr);
   }
 }
--- a/src/heap/incremental-marking-inl.h
+++ b/src/heap/incremental-marking-inl.h
@@ -26,10 +26,11 @@ void IncrementalMarking::RecordWriteOfCodeEntry(JSFunction* host, Object** slot,
   }
 }

-void IncrementalMarking::RecordWriteIntoCode(Code* host, RelocInfo* rinfo,
+void IncrementalMarking::RecordWriteIntoCode(HeapObject* obj, RelocInfo* rinfo,
                                              Object* value) {
   if (IsMarking() && value->IsHeapObject()) {
-    RecordWriteIntoCodeSlow(host, rinfo, value);
+    RecordWriteIntoCodeSlow(obj, rinfo, value);
   }
 }
--- a/src/heap/incremental-marking.cc
+++ b/src/heap/incremental-marking.cc
@@ -131,11 +131,13 @@ void IncrementalMarking::RecordWriteOfCodeEntrySlow(JSFunction* host,
   }
 }

-void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
+void IncrementalMarking::RecordWriteIntoCodeSlow(HeapObject* obj,
+                                                 RelocInfo* rinfo,
                                                  Object* value) {
-  if (BaseRecordWrite(host, value)) {
+  if (BaseRecordWrite(obj, value)) {
     // Object is not going to be rescanned. We need to record the slot.
-    heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value);
+    heap_->mark_compact_collector()->RecordRelocSlot(rinfo, value);
   }
 }
--- a/src/heap/incremental-marking.h
+++ b/src/heap/incremental-marking.h
@@ -165,13 +165,15 @@ class IncrementalMarking {
   // the incremental cycle (stays white).
   INLINE(bool BaseRecordWrite(HeapObject* obj, Object* value));
   INLINE(void RecordWrite(HeapObject* obj, Object** slot, Object* value));
-  INLINE(void RecordWriteIntoCode(Code* host, RelocInfo* rinfo, Object* value));
+  INLINE(void RecordWriteIntoCode(HeapObject* obj, RelocInfo* rinfo,
+                                  Object* value));
   INLINE(void RecordWriteOfCodeEntry(JSFunction* host, Object** slot,
                                      Code* value));
   void RecordWriteSlow(HeapObject* obj, Object** slot, Object* value);
-  void RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, Object* value);
+  void RecordWriteIntoCodeSlow(HeapObject* obj, RelocInfo* rinfo,
+                               Object* value);
   void RecordWriteOfCodeEntrySlow(JSFunction* host, Object** slot, Code* value);
   void RecordCodeTargetPatch(Code* host, Address pc, HeapObject* value);
   void RecordCodeTargetPatch(Address pc, HeapObject* value);
--- a/src/heap/mark-compact-inl.h
+++ b/src/heap/mark-compact-inl.h
@@ -6,7 +6,7 @@
 #define V8_HEAP_MARK_COMPACT_INL_H_

 #include "src/heap/mark-compact.h"
-#include "src/heap/remembered-set.h"
+#include "src/heap/slots-buffer.h"
 #include "src/isolate.h"

 namespace v8 {
@@ -70,11 +70,25 @@ bool MarkCompactCollector::IsMarked(Object* obj) {

 void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
                                       Object* target) {
   Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
-  Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
   if (target_page->IsEvacuationCandidate() &&
       !ShouldSkipEvacuationSlotRecording(object)) {
-    RememberedSet<OLD_TO_OLD>::Insert(source_page,
-                                      reinterpret_cast<Address>(slot));
+    if (!SlotsBuffer::AddTo(slots_buffer_allocator_,
+                            target_page->slots_buffer_address(), slot,
+                            SlotsBuffer::FAIL_ON_OVERFLOW)) {
+      EvictPopularEvacuationCandidate(target_page);
+    }
+  }
+}
+
+void MarkCompactCollector::ForceRecordSlot(HeapObject* object, Object** slot,
+                                           Object* target) {
+  Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
+  if (target_page->IsEvacuationCandidate() &&
+      !ShouldSkipEvacuationSlotRecording(object)) {
+    CHECK(SlotsBuffer::AddTo(slots_buffer_allocator_,
+                             target_page->slots_buffer_address(), slot,
+                             SlotsBuffer::IGNORE_OVERFLOW));
   }
 }
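
The restored RecordSlot/ForceRecordSlot above reintroduce a failure mode the remembered set did not have: a page's slots buffer can overflow. The sketch below models that protocol under simplifying assumptions; SlotsBufferChain, the fixed capacity, and the Page fields are hypothetical stand-ins for SlotsBuffer::AddTo and the POPULAR_PAGE/RESCAN_ON_EVACUATION handling in the real code.

// Minimal model of slots-buffer overflow handling. Illustrative only.
#include <cstdint>
#include <vector>

using Address = uintptr_t;

struct SlotsBufferChain {
  enum AdditionMode { FAIL_ON_OVERFLOW, IGNORE_OVERFLOW };
  static const size_t kCapacity = 1024;  // arbitrary cap for the sketch

  std::vector<Address> slots;

  bool Add(Address slot, AdditionMode mode) {
    if (slots.size() >= kCapacity && mode == FAIL_ON_OVERFLOW) {
      return false;  // caller must react, e.g. by evicting the candidate
    }
    slots.push_back(slot);  // IGNORE_OVERFLOW keeps growing the chain
    return true;
  }
};

struct Page {
  SlotsBufferChain buffer;
  bool evacuation_candidate = true;
  bool rescan_on_evacuation = false;
};

// Mirrors the shape of MarkCompactCollector::RecordSlot: on overflow the
// page is "too popular", so it stops being an evacuation candidate and is
// rescanned after evacuation instead of having every slot recorded.
void RecordSlot(Page* target_page, Address slot) {
  if (!target_page->evacuation_candidate) return;
  if (!target_page->buffer.Add(slot, SlotsBufferChain::FAIL_ON_OVERFLOW)) {
    target_page->evacuation_candidate = false;
    target_page->rescan_on_evacuation = true;
  }
}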
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -21,6 +21,7 @@
 #include "src/heap/object-stats.h"
 #include "src/heap/objects-visiting-inl.h"
 #include "src/heap/objects-visiting.h"
+#include "src/heap/slots-buffer.h"
 #include "src/heap/spaces-inl.h"
 #include "src/ic/ic.h"
 #include "src/ic/stub-cache.h"
@@ -54,6 +55,8 @@ MarkCompactCollector::MarkCompactCollector(Heap* heap)
       marking_parity_(ODD_MARKING_PARITY),
       was_marked_incrementally_(false),
       evacuation_(false),
+      slots_buffer_allocator_(nullptr),
+      migration_slots_buffer_(nullptr),
       heap_(heap),
       marking_deque_memory_(NULL),
       marking_deque_memory_committed_(0),
@@ -246,6 +249,7 @@ void MarkCompactCollector::SetUp() {
   free_list_map_space_.Reset(new FreeList(heap_->map_space()));
   EnsureMarkingDequeIsReserved();
   EnsureMarkingDequeIsCommitted(kMinMarkingDequeSize);
+  slots_buffer_allocator_ = new SlotsBufferAllocator();

   if (FLAG_flush_code) {
     code_flusher_ = new CodeFlusher(isolate());
@@ -259,6 +263,7 @@ void MarkCompactCollector::SetUp() {
 void MarkCompactCollector::TearDown() {
   AbortCompaction();
   delete marking_deque_memory_;
+  delete slots_buffer_allocator_;
   delete code_flusher_;
 }
@@ -305,26 +310,55 @@ bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
   return compacting_;
 }

-void MarkCompactCollector::ClearInvalidRememberedSetSlots() {
+void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() {
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_CLEAR_STORE_BUFFER);
     RememberedSet<OLD_TO_NEW>::ClearInvalidSlots(heap());
   }
-  // There is not need to filter the old to old set because
-  // it is completely cleared after the mark-compact GC.
-  // The slots that become invalid due to runtime transitions are
-  // cleared eagerly immediately after the transition.
+  {
+    GCTracer::Scope gc_scope(heap()->tracer(),
+                             GCTracer::Scope::MC_CLEAR_SLOTS_BUFFER);
+    for (Page* p : evacuation_candidates_) {
+      SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer());
+    }
+  }
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
-    RememberedSet<OLD_TO_NEW>::VerifyValidSlots(heap());
-    RememberedSet<OLD_TO_OLD>::VerifyValidSlots(heap());
+    VerifyValidStoreAndSlotsBufferEntries();
   }
 #endif
 }

+#ifdef VERIFY_HEAP
+static void VerifyValidSlotsBufferEntries(Heap* heap, PagedSpace* space) {
+  PageIterator it(space);
+  while (it.has_next()) {
+    Page* p = it.next();
+    SlotsBuffer::VerifySlots(heap, p->slots_buffer());
+  }
+}
+
+void MarkCompactCollector::VerifyValidStoreAndSlotsBufferEntries() {
+  RememberedSet<OLD_TO_NEW>::VerifyValidSlots(heap());
+
+  VerifyValidSlotsBufferEntries(heap(), heap()->old_space());
+  VerifyValidSlotsBufferEntries(heap(), heap()->code_space());
+  VerifyValidSlotsBufferEntries(heap(), heap()->map_space());
+
+  LargeObjectIterator it(heap()->lo_space());
+  for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
+    MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
+    SlotsBuffer::VerifySlots(heap(), chunk->slots_buffer());
+  }
+}
+#endif
+
 void MarkCompactCollector::CollectGarbage() {
   // Make sure that Prepare() has been called. The individual steps below will
   // update the state as they proceed.
@@ -674,8 +708,7 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
     // of a GC all evacuation candidates are cleared and their slot buffers are
     // released.
     CHECK(!p->IsEvacuationCandidate());
-    CHECK_NULL(p->old_to_old_slots());
-    CHECK_NULL(p->typed_old_to_old_slots());
+    CHECK(p->slots_buffer() == nullptr);
     CHECK(p->SweepingDone());
     DCHECK(p->area_size() == area_size);
     pages.push_back(std::make_pair(p->LiveBytesFromFreeList(), p));
@@ -781,8 +814,8 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
 void MarkCompactCollector::AbortCompaction() {
   if (compacting_) {
-    RememberedSet<OLD_TO_OLD>::ClearAll(heap());
     for (Page* p : evacuation_candidates_) {
+      slots_buffer_allocator_->DeallocateChain(p->slots_buffer_address());
      p->ClearEvacuationCandidate();
      p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION);
     }
@@ -1497,12 +1530,12 @@ class MarkCompactCollector::EvacuateVisitorBase
     : public MarkCompactCollector::HeapObjectVisitor {
  public:
   EvacuateVisitorBase(Heap* heap, CompactionSpaceCollection* compaction_spaces,
-                      LocalSlotsBuffer* old_to_old_slots,
-                      LocalSlotsBuffer* old_to_new_slots)
+                      SlotsBuffer** evacuation_slots_buffer,
+                      LocalStoreBuffer* local_store_buffer)
       : heap_(heap),
+        evacuation_slots_buffer_(evacuation_slots_buffer),
         compaction_spaces_(compaction_spaces),
-        old_to_old_slots_(old_to_old_slots),
-        old_to_new_slots_(old_to_new_slots) {}
+        local_store_buffer_(local_store_buffer) {}

   bool TryEvacuateObject(PagedSpace* target_space, HeapObject* object,
                          HeapObject** target_object) {
@@ -1512,7 +1545,7 @@ class MarkCompactCollector::EvacuateVisitorBase
     if (allocation.To(target_object)) {
       heap_->mark_compact_collector()->MigrateObject(
           *target_object, object, size, target_space->identity(),
-          old_to_old_slots_, old_to_new_slots_);
+          evacuation_slots_buffer_, local_store_buffer_);
       return true;
     }
     return false;
@@ -1520,9 +1553,9 @@ class MarkCompactCollector::EvacuateVisitorBase
  protected:
   Heap* heap_;
+  SlotsBuffer** evacuation_slots_buffer_;
   CompactionSpaceCollection* compaction_spaces_;
-  LocalSlotsBuffer* old_to_old_slots_;
-  LocalSlotsBuffer* old_to_new_slots_;
+  LocalStoreBuffer* local_store_buffer_;
 };
@@ -1534,11 +1567,11 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
   explicit EvacuateNewSpaceVisitor(Heap* heap,
                                    CompactionSpaceCollection* compaction_spaces,
-                                   LocalSlotsBuffer* old_to_old_slots,
-                                   LocalSlotsBuffer* old_to_new_slots,
+                                   SlotsBuffer** evacuation_slots_buffer,
+                                   LocalStoreBuffer* local_store_buffer,
                                    HashMap* local_pretenuring_feedback)
-      : EvacuateVisitorBase(heap, compaction_spaces, old_to_old_slots,
-                            old_to_new_slots),
+      : EvacuateVisitorBase(heap, compaction_spaces, evacuation_slots_buffer,
+                            local_store_buffer),
         buffer_(LocalAllocationBuffer::InvalidBuffer()),
         space_to_allocate_(NEW_SPACE),
         promoted_size_(0),
@@ -1565,8 +1598,8 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
     AllocationSpace space = AllocateTargetObject(object, &target);
     heap_->mark_compact_collector()->MigrateObject(
         HeapObject::cast(target), object, size, space,
-        (space == NEW_SPACE) ? nullptr : old_to_old_slots_,
-        (space == NEW_SPACE) ? nullptr : old_to_new_slots_);
+        (space == NEW_SPACE) ? nullptr : evacuation_slots_buffer_,
+        (space == NEW_SPACE) ? nullptr : local_store_buffer_);
     if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
       heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
     }
@@ -1686,10 +1719,10 @@ class MarkCompactCollector::EvacuateOldSpaceVisitor final
  public:
   EvacuateOldSpaceVisitor(Heap* heap,
                           CompactionSpaceCollection* compaction_spaces,
-                          LocalSlotsBuffer* old_to_old_slots,
-                          LocalSlotsBuffer* old_to_new_slots)
-      : EvacuateVisitorBase(heap, compaction_spaces, old_to_old_slots,
-                            old_to_new_slots) {}
+                          SlotsBuffer** evacuation_slots_buffer,
+                          LocalStoreBuffer* local_store_buffer)
+      : EvacuateVisitorBase(heap, compaction_spaces, evacuation_slots_buffer,
+                            local_store_buffer) {}

   bool Visit(HeapObject* object) override {
     CompactionSpace* target_space = compaction_spaces_->Get(
@@ -2146,7 +2179,7 @@ void MarkCompactCollector::ClearNonLiveReferences() {
   ClearWeakCollections();

-  ClearInvalidRememberedSetSlots();
+  ClearInvalidStoreAndSlotsBufferEntries();
 }
@@ -2509,56 +2542,88 @@ void MarkCompactCollector::AbortTransitionArrays() {
 }

 void MarkCompactCollector::RecordMigratedSlot(
-    Object* value, Address slot, LocalSlotsBuffer* old_to_old_slots,
-    LocalSlotsBuffer* old_to_new_slots) {
+    Object* value, Address slot, SlotsBuffer** evacuation_slots_buffer,
+    LocalStoreBuffer* local_store_buffer) {
   // When parallel compaction is in progress, store and slots buffer entries
   // require synchronization.
   if (heap_->InNewSpace(value)) {
     if (compaction_in_progress_) {
-      old_to_new_slots->Record(slot);
+      local_store_buffer->Record(slot);
     } else {
       Page* page = Page::FromAddress(slot);
       RememberedSet<OLD_TO_NEW>::Insert(page, slot);
     }
   } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) {
-    old_to_old_slots->Record(slot);
+    SlotsBuffer::AddTo(slots_buffer_allocator_, evacuation_slots_buffer,
+                       reinterpret_cast<Object**>(slot),
+                       SlotsBuffer::IGNORE_OVERFLOW);
   }
 }

-static inline SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
+void MarkCompactCollector::RecordMigratedCodeEntrySlot(
+    Address code_entry, Address code_entry_slot,
+    SlotsBuffer** evacuation_slots_buffer) {
+  if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) {
+    SlotsBuffer::AddTo(slots_buffer_allocator_, evacuation_slots_buffer,
+                       SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot,
+                       SlotsBuffer::IGNORE_OVERFLOW);
+  }
+}
+
+void MarkCompactCollector::RecordMigratedCodeObjectSlot(
+    Address code_object, SlotsBuffer** evacuation_slots_buffer) {
+  SlotsBuffer::AddTo(slots_buffer_allocator_, evacuation_slots_buffer,
+                     SlotsBuffer::RELOCATED_CODE_OBJECT, code_object,
+                     SlotsBuffer::IGNORE_OVERFLOW);
+}
+
+static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
   if (RelocInfo::IsCodeTarget(rmode)) {
-    return CODE_TARGET_SLOT;
+    return SlotsBuffer::CODE_TARGET_SLOT;
   } else if (RelocInfo::IsCell(rmode)) {
-    return CELL_TARGET_SLOT;
+    return SlotsBuffer::CELL_TARGET_SLOT;
   } else if (RelocInfo::IsEmbeddedObject(rmode)) {
-    return EMBEDDED_OBJECT_SLOT;
+    return SlotsBuffer::EMBEDDED_OBJECT_SLOT;
   } else if (RelocInfo::IsDebugBreakSlot(rmode)) {
-    return DEBUG_TARGET_SLOT;
+    return SlotsBuffer::DEBUG_TARGET_SLOT;
   }
   UNREACHABLE();
-  return NUMBER_OF_SLOT_TYPES;
+  return SlotsBuffer::NUMBER_OF_SLOT_TYPES;
 }

+static inline SlotsBuffer::SlotType DecodeSlotType(
+    SlotsBuffer::ObjectSlot slot) {
+  return static_cast<SlotsBuffer::SlotType>(reinterpret_cast<intptr_t>(slot));
+}
+
-void MarkCompactCollector::RecordRelocSlot(Code* host, RelocInfo* rinfo,
-                                           Object* target) {
+void MarkCompactCollector::RecordRelocSlot(RelocInfo* rinfo, Object* target) {
   Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
-  Page* source_page = Page::FromAddress(reinterpret_cast<Address>(host));
   RelocInfo::Mode rmode = rinfo->rmode();
   if (target_page->IsEvacuationCandidate() &&
       (rinfo->host() == NULL ||
        !ShouldSkipEvacuationSlotRecording(rinfo->host()))) {
     Address addr = rinfo->pc();
-    SlotType slot_type = SlotTypeForRMode(rmode);
+    SlotsBuffer::SlotType slot_type = SlotTypeForRMode(rmode);
     if (rinfo->IsInConstantPool()) {
       addr = rinfo->constant_pool_entry_address();
       if (RelocInfo::IsCodeTarget(rmode)) {
-        slot_type = CODE_ENTRY_SLOT;
+        slot_type = SlotsBuffer::CODE_ENTRY_SLOT;
       } else {
         DCHECK(RelocInfo::IsEmbeddedObject(rmode));
-        slot_type = OBJECT_SLOT;
+        slot_type = SlotsBuffer::OBJECT_SLOT;
       }
     }
-    RememberedSet<OLD_TO_OLD>::InsertTyped(source_page, slot_type, addr);
+    bool success = SlotsBuffer::AddTo(
+        slots_buffer_allocator_, target_page->slots_buffer_address(), slot_type,
+        addr, SlotsBuffer::FAIL_ON_OVERFLOW);
+    if (!success) {
+      EvictPopularEvacuationCandidate(target_page);
+    }
   }
 }
@@ -2566,21 +2631,23 @@ void MarkCompactCollector::RecordRelocSlot(Code* host, RelocInfo* rinfo,
 class RecordMigratedSlotVisitor final : public ObjectVisitor {
  public:
   RecordMigratedSlotVisitor(MarkCompactCollector* collector,
-                            LocalSlotsBuffer* old_to_old_slots,
-                            LocalSlotsBuffer* old_to_new_slots)
+                            SlotsBuffer** evacuation_slots_buffer,
+                            LocalStoreBuffer* local_store_buffer)
       : collector_(collector),
-        old_to_old_slots_(old_to_old_slots),
-        old_to_new_slots_(old_to_new_slots) {}
+        evacuation_slots_buffer_(evacuation_slots_buffer),
+        local_store_buffer_(local_store_buffer) {}

   V8_INLINE void VisitPointer(Object** p) override {
     collector_->RecordMigratedSlot(*p, reinterpret_cast<Address>(p),
-                                   old_to_old_slots_, old_to_new_slots_);
+                                   evacuation_slots_buffer_,
+                                   local_store_buffer_);
   }

   V8_INLINE void VisitPointers(Object** start, Object** end) override {
     while (start < end) {
       collector_->RecordMigratedSlot(*start, reinterpret_cast<Address>(start),
-                                     old_to_old_slots_, old_to_new_slots_);
+                                     evacuation_slots_buffer_,
+                                     local_store_buffer_);
       ++start;
     }
   }
@@ -2588,16 +2655,15 @@ class RecordMigratedSlotVisitor final : public ObjectVisitor {
   V8_INLINE void VisitCodeEntry(Address code_entry_slot) override {
     if (collector_->compacting_) {
       Address code_entry = Memory::Address_at(code_entry_slot);
-      if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) {
-        old_to_old_slots_->Record(CODE_ENTRY_SLOT, code_entry_slot);
-      }
+      collector_->RecordMigratedCodeEntrySlot(code_entry, code_entry_slot,
+                                              evacuation_slots_buffer_);
     }
   }

  private:
   MarkCompactCollector* collector_;
-  LocalSlotsBuffer* old_to_old_slots_;
-  LocalSlotsBuffer* old_to_new_slots_;
+  SlotsBuffer** evacuation_slots_buffer_;
+  LocalStoreBuffer* local_store_buffer_;
 };
@@ -2617,28 +2683,31 @@ class RecordMigratedSlotVisitor final : public ObjectVisitor {
 // pointers to new space.
 void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src,
                                          int size, AllocationSpace dest,
-                                         LocalSlotsBuffer* old_to_old_slots,
-                                         LocalSlotsBuffer* old_to_new_slots) {
+                                         SlotsBuffer** evacuation_slots_buffer,
+                                         LocalStoreBuffer* local_store_buffer) {
   Address dst_addr = dst->address();
   Address src_addr = src->address();
   DCHECK(heap()->AllowedToBeMigrated(src, dest));
   DCHECK(dest != LO_SPACE);
   if (dest == OLD_SPACE) {
     DCHECK_OBJECT_SIZE(size);
+    DCHECK(evacuation_slots_buffer != nullptr);
     DCHECK(IsAligned(size, kPointerSize));
     heap()->MoveBlock(dst->address(), src->address(), size);
-    RecordMigratedSlotVisitor visitor(this, old_to_old_slots, old_to_new_slots);
+    RecordMigratedSlotVisitor visitor(this, evacuation_slots_buffer,
+                                      local_store_buffer);
     dst->IterateBody(&visitor);
   } else if (dest == CODE_SPACE) {
     DCHECK_CODEOBJECT_SIZE(size, heap()->code_space());
+    DCHECK(evacuation_slots_buffer != nullptr);
     PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr));
     heap()->MoveBlock(dst_addr, src_addr, size);
-    old_to_old_slots->Record(RELOCATED_CODE_OBJECT, dst_addr);
+    RecordMigratedCodeObjectSlot(dst_addr, evacuation_slots_buffer);
     Code::cast(dst)->Relocate(dst_addr - src_addr);
   } else {
     DCHECK_OBJECT_SIZE(size);
-    DCHECK(old_to_old_slots == nullptr);
+    DCHECK(evacuation_slots_buffer == nullptr);
     DCHECK(dest == NEW_SPACE);
     heap()->MoveBlock(dst_addr, src_addr, size);
   }
@@ -2646,40 +2715,41 @@ void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src,
   Memory::Address_at(src_addr) = dst_addr;
 }

-static inline void UpdateTypedSlot(Isolate* isolate, ObjectVisitor* v,
-                                   SlotType slot_type, Address addr) {
+static inline void UpdateSlot(Isolate* isolate, ObjectVisitor* v,
+                              SlotsBuffer::SlotType slot_type, Address addr) {
   switch (slot_type) {
-    case CODE_TARGET_SLOT: {
+    case SlotsBuffer::CODE_TARGET_SLOT: {
       RelocInfo rinfo(isolate, addr, RelocInfo::CODE_TARGET, 0, NULL);
       rinfo.Visit(isolate, v);
       break;
     }
-    case CELL_TARGET_SLOT: {
+    case SlotsBuffer::CELL_TARGET_SLOT: {
       RelocInfo rinfo(isolate, addr, RelocInfo::CELL, 0, NULL);
       rinfo.Visit(isolate, v);
       break;
     }
-    case CODE_ENTRY_SLOT: {
+    case SlotsBuffer::CODE_ENTRY_SLOT: {
       v->VisitCodeEntry(addr);
       break;
     }
-    case RELOCATED_CODE_OBJECT: {
+    case SlotsBuffer::RELOCATED_CODE_OBJECT: {
       HeapObject* obj = HeapObject::FromAddress(addr);
       Code::BodyDescriptor::IterateBody(obj, v);
       break;
     }
-    case DEBUG_TARGET_SLOT: {
+    case SlotsBuffer::DEBUG_TARGET_SLOT: {
       RelocInfo rinfo(isolate, addr, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION, 0,
                       NULL);
       if (rinfo.IsPatchedDebugBreakSlotSequence()) rinfo.Visit(isolate, v);
       break;
     }
-    case EMBEDDED_OBJECT_SLOT: {
+    case SlotsBuffer::EMBEDDED_OBJECT_SLOT: {
       RelocInfo rinfo(isolate, addr, RelocInfo::EMBEDDED_OBJECT, 0, NULL);
       rinfo.Visit(isolate, v);
       break;
     }
-    case OBJECT_SLOT: {
+    case SlotsBuffer::OBJECT_SLOT: {
       v->VisitPointer(reinterpret_cast<Object**>(addr));
       break;
     }
@@ -2784,6 +2854,32 @@ class PointersUpdatingVisitor : public ObjectVisitor {
 };

+void MarkCompactCollector::UpdateSlots(SlotsBuffer* buffer) {
+  PointersUpdatingVisitor v(heap_);
+  size_t buffer_size = buffer->Size();
+
+  for (size_t slot_idx = 0; slot_idx < buffer_size; ++slot_idx) {
+    SlotsBuffer::ObjectSlot slot = buffer->Get(slot_idx);
+    if (!SlotsBuffer::IsTypedSlot(slot)) {
+      PointersUpdatingVisitor::UpdateSlot(heap_, slot);
+    } else {
+      ++slot_idx;
+      DCHECK(slot_idx < buffer_size);
+      UpdateSlot(heap_->isolate(), &v, DecodeSlotType(slot),
+                 reinterpret_cast<Address>(buffer->Get(slot_idx)));
+    }
+  }
+}
+
+void MarkCompactCollector::UpdateSlotsRecordedIn(SlotsBuffer* buffer) {
+  while (buffer != NULL) {
+    UpdateSlots(buffer);
+    buffer = buffer->next();
+  }
+}
+
 static void UpdatePointer(HeapObject** address, HeapObject* object) {
   MapWord map_word = object->map_word();
   // Since we only filter invalid slots in old space, the store buffer can
@@ -2905,33 +3001,33 @@ bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot,
   return false;
 }

-HeapObject* MarkCompactCollector::FindBlackObjectBySlotSlow(Address slot) {
-  Page* p = Page::FromAddress(slot);
+bool MarkCompactCollector::IsSlotInBlackObjectSlow(Page* p, Address slot) {
   // This function does not support large objects right now.
   Space* owner = p->owner();
-  if (owner == heap_->lo_space() || owner == nullptr) {
+  if (owner == heap_->lo_space() || owner == NULL) {
     Object* large_object = heap_->lo_space()->FindObject(slot);
     // This object has to exist, otherwise we would not have recorded a slot
     // for it.
     CHECK(large_object->IsHeapObject());
     HeapObject* large_heap_object = HeapObject::cast(large_object);
     if (IsMarked(large_heap_object)) {
-      return large_heap_object;
+      return true;
     }
-    return nullptr;
+    return false;
   }

   LiveObjectIterator<kBlackObjects> it(p);
-  HeapObject* object = nullptr;
-  while ((object = it.Next()) != nullptr) {
+  HeapObject* object = NULL;
+  while ((object = it.Next()) != NULL) {
     int size = object->Size();
-    if (object->address() > slot) return nullptr;
+    if (object->address() > slot) return false;
     if (object->address() <= slot && slot < (object->address() + size)) {
-      return object;
+      return true;
     }
   }
-  return nullptr;
+  return false;
 }
@@ -2950,6 +3046,18 @@ bool MarkCompactCollector::IsSlotInLiveObject(Address slot) {
 }

+void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot,
+                                                    HeapObject* object) {
+  // The target object has to be black.
+  CHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));
+
+  // The target object is black but we don't know if the source slot is black.
+  // The source object could have died and the slot could be part of a free
+  // space. Use the mark bit iterator to find out about liveness of the slot.
+  CHECK(IsSlotInBlackObjectSlow(Page::FromAddress(slot), slot));
+}
+
 void MarkCompactCollector::EvacuateNewSpacePrologue() {
   NewSpace* new_space = heap()->new_space();
   NewSpacePageIterator it(new_space->bottom(), new_space->top());
@@ -2966,6 +3074,12 @@ void MarkCompactCollector::EvacuateNewSpaceEpilogue() {
 }

+void MarkCompactCollector::AddEvacuationSlotsBufferSynchronized(
+    SlotsBuffer* evacuation_slots_buffer) {
+  base::LockGuard<base::Mutex> lock_guard(&evacuation_slots_buffers_mutex_);
+  evacuation_slots_buffers_.Add(evacuation_slots_buffer);
+}
+
 class MarkCompactCollector::Evacuator : public Malloced {
  public:
   Evacuator(MarkCompactCollector* collector,
@@ -2975,13 +3089,15 @@ class MarkCompactCollector::Evacuator : public Malloced {
         evacuation_candidates_(evacuation_candidates),
         newspace_evacuation_candidates_(newspace_evacuation_candidates),
         compaction_spaces_(collector->heap()),
+        local_slots_buffer_(nullptr),
+        local_store_buffer_(collector->heap()),
         local_pretenuring_feedback_(HashMap::PointersMatch,
                                     kInitialLocalPretenuringFeedbackCapacity),
         new_space_visitor_(collector->heap(), &compaction_spaces_,
-                           &old_to_old_slots_, &old_to_new_slots_,
+                           &local_slots_buffer_, &local_store_buffer_,
                            &local_pretenuring_feedback_),
         old_space_visitor_(collector->heap(), &compaction_spaces_,
-                           &old_to_old_slots_, &old_to_new_slots_),
+                           &local_slots_buffer_, &local_store_buffer_),
         duration_(0.0),
         bytes_compacted_(0),
         task_id_(0) {}
@@ -3018,8 +3134,8 @@ class MarkCompactCollector::Evacuator : public Malloced {
   // Locally cached collector data.
   CompactionSpaceCollection compaction_spaces_;
-  LocalSlotsBuffer old_to_old_slots_;
-  LocalSlotsBuffer old_to_new_slots_;
+  SlotsBuffer* local_slots_buffer_;
+  LocalStoreBuffer local_store_buffer_;
   HashMap local_pretenuring_feedback_;

   // Vistors for the corresponding spaces.
@@ -3097,22 +3213,8 @@ void MarkCompactCollector::Evacuator::Finalize() {
                               new_space_visitor_.promoted_size() +
                               new_space_visitor_.semispace_copied_size());
   heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_);
-  // Move locally recorded slots to the global remembered sets.
-  old_to_new_slots_.Iterate(
-      [](Address slot) {
-        Page* page = Page::FromAddress(slot);
-        RememberedSet<OLD_TO_NEW>::Insert(page, slot);
-      },
-      [](SlotType type, Address slot) { UNREACHABLE(); });
-  old_to_old_slots_.Iterate(
-      [](Address slot) {
-        Page* page = Page::FromAddress(slot);
-        RememberedSet<OLD_TO_OLD>::Insert(page, slot);
-      },
-      [](SlotType type, Address slot) {
-        Page* page = Page::FromAddress(slot);
-        RememberedSet<OLD_TO_OLD>::InsertTyped(page, type, slot);
-      });
+  local_store_buffer_.Process(heap()->store_buffer());
+  collector_->AddEvacuationSlotsBufferSynchronized(local_slots_buffer_);
 }

 class MarkCompactCollector::CompactionTask : public CancelableTask {
@@ -3419,10 +3521,8 @@ void MarkCompactCollector::InvalidateCode(Code* code) {
     // Ignore all slots that might have been recorded in the body of the
     // deoptimized code object. Assumption: no slots will be recorded for
     // this object after invalidating it.
-    Page* page = Page::FromAddress(code->address());
-    Address start = code->instruction_start();
-    Address end = code->address() + code->Size();
-    RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end);
+    RemoveObjectSlots(code->instruction_start(),
+                      code->address() + code->Size());
   }
 }
@@ -3433,6 +3533,21 @@ bool MarkCompactCollector::WillBeDeoptimized(Code* code) {
 }

+void MarkCompactCollector::RemoveObjectSlots(Address start_slot,
+                                             Address end_slot) {
+  // Remove entries by replacing them with an old-space slot containing a smi
+  // that is located in an unmovable page.
+  for (Page* p : evacuation_candidates_) {
+    DCHECK(p->IsEvacuationCandidate() ||
+           p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
+    if (p->IsEvacuationCandidate()) {
+      SlotsBuffer::RemoveObjectSlots(heap_, p->slots_buffer(), start_slot,
+                                     end_slot);
+    }
+  }
+}
+
 #ifdef VERIFY_HEAP
 static void VerifyAllBlackObjects(MemoryChunk* page) {
   LiveObjectIterator<kAllLiveObjects> it(page);
@@ -3584,7 +3699,30 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
 void MarkCompactCollector::UpdatePointersAfterEvacuation() {
   GCTracer::Scope gc_scope(heap()->tracer(),
                            GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS);
+  {
+    GCTracer::Scope gc_scope(
+        heap()->tracer(),
+        GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED);
+    UpdateSlotsRecordedIn(migration_slots_buffer_);
+    if (FLAG_trace_fragmentation_verbose) {
+      PrintF("  migration slots buffer: %d\n",
+             SlotsBuffer::SizeOfChain(migration_slots_buffer_));
+    }
+    slots_buffer_allocator_->DeallocateChain(&migration_slots_buffer_);
+    DCHECK(migration_slots_buffer_ == NULL);
+
+    // TODO(hpayer): Process the slots buffers in parallel. This has to be done
+    // after evacuation of all pages finishes.
+    int buffers = evacuation_slots_buffers_.length();
+    for (int i = 0; i < buffers; i++) {
+      SlotsBuffer* buffer = evacuation_slots_buffers_[i];
+      UpdateSlotsRecordedIn(buffer);
+      slots_buffer_allocator_->DeallocateChain(&buffer);
+    }
+    evacuation_slots_buffers_.Rewind(0);
+  }

+  // Second pass: find pointers to new space and update them.
   PointersUpdatingVisitor updating_visitor(heap());

   {
@@ -3604,26 +3742,6 @@ void MarkCompactCollector::UpdatePointersAfterEvacuation() {
     RememberedSet<OLD_TO_NEW>::IterateWithWrapper(heap_, UpdatePointer);
   }

-  {
-    Heap* heap = this->heap();
-    GCTracer::Scope gc_scope(
-        heap->tracer(),
-        GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED);
-    RememberedSet<OLD_TO_OLD>::Iterate(heap, [heap](Address slot) {
-      PointersUpdatingVisitor::UpdateSlot(heap,
-                                          reinterpret_cast<Object**>(slot));
-      return REMOVE_SLOT;
-    });
-    Isolate* isolate = heap->isolate();
-    PointersUpdatingVisitor* visitor = &updating_visitor;
-    RememberedSet<OLD_TO_OLD>::IterateTyped(
-        heap, [isolate, visitor](SlotType type, Address slot) {
-          UpdateTypedSlot(isolate, visitor, type, slot);
-          return REMOVE_SLOT;
-        });
-  }
-
   {
     GCTracer::Scope gc_scope(
         heap()->tracer(),
@@ -3633,6 +3751,13 @@ void MarkCompactCollector::UpdatePointersAfterEvacuation() {
              p->IsFlagSet(Page::RESCAN_ON_EVACUATION));

       if (p->IsEvacuationCandidate()) {
+        UpdateSlotsRecordedIn(p->slots_buffer());
+        if (FLAG_trace_fragmentation_verbose) {
+          PrintF("  page %p slots buffer: %d\n", reinterpret_cast<void*>(p),
+                 SlotsBuffer::SizeOfChain(p->slots_buffer()));
+        }
+        slots_buffer_allocator_->DeallocateChain(p->slots_buffer_address());
+
         // Important: skip list should be cleared only after roots were updated
         // because root iteration traverses the stack and might have to find
         // code objects from non-updated pc pointing into evacuation candidate.
@@ -3894,13 +4019,41 @@ void MarkCompactCollector::Initialize() {
   IncrementalMarking::Initialize();
 }

+void MarkCompactCollector::EvictPopularEvacuationCandidate(Page* page) {
+  if (FLAG_trace_fragmentation) {
+    PrintF("Page %p is too popular. Disabling evacuation.\n",
+           reinterpret_cast<void*>(page));
+  }
+
+  isolate()->CountUsage(v8::Isolate::UseCounterFeature::kSlotsBufferOverflow);
+
+  // TODO(gc) If all evacuation candidates are too popular we
+  // should stop slots recording entirely.
+  page->ClearEvacuationCandidate();
+
+  DCHECK(!page->IsFlagSet(Page::POPULAR_PAGE));
+  page->SetFlag(Page::POPULAR_PAGE);
+
+  // We were not collecting slots on this page that point
+  // to other evacuation candidates thus we have to
+  // rescan the page after evacuation to discover and update all
+  // pointers to evacuated objects.
+  page->SetFlag(Page::RESCAN_ON_EVACUATION);
+}
+
-void MarkCompactCollector::RecordCodeEntrySlot(HeapObject* host, Address slot,
+void MarkCompactCollector::RecordCodeEntrySlot(HeapObject* object, Address slot,
                                                Code* target) {
   Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
-  Page* source_page = Page::FromAddress(reinterpret_cast<Address>(host));
   if (target_page->IsEvacuationCandidate() &&
-      !ShouldSkipEvacuationSlotRecording(host)) {
-    RememberedSet<OLD_TO_OLD>::InsertTyped(source_page, CODE_ENTRY_SLOT, slot);
+      !ShouldSkipEvacuationSlotRecording(object)) {
+    if (!SlotsBuffer::AddTo(slots_buffer_allocator_,
+                            target_page->slots_buffer_address(),
+                            SlotsBuffer::CODE_ENTRY_SLOT, slot,
+                            SlotsBuffer::FAIL_ON_OVERFLOW)) {
+      EvictPopularEvacuationCandidate(target_page);
+    }
   }
 }
@@ -3914,7 +4067,7 @@ void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) {
     MarkBit mark_bit = Marking::MarkBitFrom(host);
     if (Marking::IsBlack(mark_bit)) {
       RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
-      RecordRelocSlot(host, &rinfo, target);
+      RecordRelocSlot(&rinfo, target);
     }
   }
 }
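
One subtlety in the restored mark-compact code is the buffer encoding that UpdateSlots and DecodeSlotType rely on: a typed entry occupies two consecutive pointer-sized words, a small type tag (reinterpreted as a pointer) followed by the address it applies to. The standalone sketch below demonstrates that encoding; the tag test in IsTypedSlot is a simplifying assumption, not SlotsBuffer's exact predicate.

// Sketch of the two-word typed-slot encoding decoded by UpdateSlots.
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <vector>

enum SlotType : intptr_t {
  CODE_TARGET_SLOT,
  EMBEDDED_OBJECT_SLOT,
  NUMBER_OF_SLOT_TYPES
};

using ObjectSlot = void*;  // buffer entries are pointer-sized words

bool IsTypedSlot(ObjectSlot entry) {
  // Small integers cannot be valid slot addresses, so they mark the start
  // of a typed (tag, address) pair.
  return reinterpret_cast<intptr_t>(entry) < NUMBER_OF_SLOT_TYPES;
}

SlotType DecodeSlotType(ObjectSlot entry) {
  return static_cast<SlotType>(reinterpret_cast<intptr_t>(entry));
}

int main() {
  std::vector<ObjectSlot> buffer;
  intptr_t some_address = 0x10000;  // stand-in for a real heap address

  // Record one typed slot: the tag word first, then the address word.
  buffer.push_back(
      reinterpret_cast<ObjectSlot>(static_cast<intptr_t>(CODE_TARGET_SLOT)));
  buffer.push_back(reinterpret_cast<ObjectSlot>(some_address));

  // Iterate the way UpdateSlots does: a typed entry consumes two indices.
  for (size_t i = 0; i < buffer.size(); ++i) {
    if (IsTypedSlot(buffer[i])) {
      SlotType type = DecodeSlotType(buffer[i]);
      ++i;
      assert(i < buffer.size());
      std::printf("typed slot %d at %p\n", static_cast<int>(type), buffer[i]);
    }
  }
  return 0;
}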
--- a/src/heap/mark-compact.h
+++ b/src/heap/mark-compact.h
@@ -25,7 +25,9 @@ class CodeFlusher;
 class MarkCompactCollector;
 class MarkingVisitor;
 class RootMarkingVisitor;
-class LocalSlotsBuffer;
+class SlotsBuffer;
+class SlotsBufferAllocator;

 class Marking : public AllStatic {
  public:
@@ -393,8 +395,8 @@ class MarkCompactCollector {
         ->IsEvacuationCandidate();
   }

-  void RecordRelocSlot(Code* host, RelocInfo* rinfo, Object* target);
-  void RecordCodeEntrySlot(HeapObject* host, Address slot, Code* target);
+  void RecordRelocSlot(RelocInfo* rinfo, Object* target);
+  void RecordCodeEntrySlot(HeapObject* object, Address slot, Code* target);
   void RecordCodeTargetPatch(Address pc, Code* target);
   INLINE(void RecordSlot(HeapObject* object, Object** slot, Object* target));
   INLINE(void ForceRecordSlot(HeapObject* object, Object** slot,
@@ -405,8 +407,8 @@ class MarkCompactCollector {
   void MigrateObject(HeapObject* dst, HeapObject* src, int size,
                      AllocationSpace to_old_space,
-                     LocalSlotsBuffer* old_to_old_slots,
-                     LocalSlotsBuffer* old_to_new_slots);
+                     SlotsBuffer** evacuation_slots_buffer,
+                     LocalStoreBuffer* local_store_buffer);

   void InvalidateCode(Code* code);
@@ -482,8 +484,9 @@ class MarkCompactCollector {
   // whole transitive closure is known. They must be called before sweeping
   // when mark bits are still intact.
   bool IsSlotInBlackObject(Page* p, Address slot, HeapObject** out_object);
-  HeapObject* FindBlackObjectBySlotSlow(Address slot);
+  bool IsSlotInBlackObjectSlow(Page* p, Address slot);
   bool IsSlotInLiveObject(Address slot);
+  void VerifyIsSlotInLiveObject(Address slot, HeapObject* object);

   // Removes all the slots in the slot buffers that are within the given
   // address range.
@@ -517,7 +520,8 @@ class MarkCompactCollector {
   explicit MarkCompactCollector(Heap* heap);

   bool WillBeDeoptimized(Code* code);
-  void ClearInvalidRememberedSetSlots();
+  void EvictPopularEvacuationCandidate(Page* page);
+  void ClearInvalidStoreAndSlotsBufferEntries();

   void StartSweeperThreads();
@@ -546,6 +550,10 @@ class MarkCompactCollector {
   bool evacuation_;

+  SlotsBufferAllocator* slots_buffer_allocator_;
+
+  SlotsBuffer* migration_slots_buffer_;
+
   // Finishes GC, performs heap verification if enabled.
   void Finish();
@@ -699,6 +707,9 @@ class MarkCompactCollector {
   void EvacuateNewSpacePrologue();
   void EvacuateNewSpaceEpilogue();

+  void AddEvacuationSlotsBufferSynchronized(
+      SlotsBuffer* evacuation_slots_buffer);
+
   void EvacuatePagesInParallel();

   // The number of parallel compaction tasks, including the main thread.
@@ -734,8 +745,16 @@ class MarkCompactCollector {
   // Updates store buffer and slot buffer for a pointer in a migrating object.
   void RecordMigratedSlot(Object* value, Address slot,
-                          LocalSlotsBuffer* old_to_old_slots,
-                          LocalSlotsBuffer* old_to_new_slots);
+                          SlotsBuffer** evacuation_slots_buffer,
+                          LocalStoreBuffer* local_store_buffer);
+
+  // Adds the code entry slot to the slots buffer.
+  void RecordMigratedCodeEntrySlot(Address code_entry, Address code_entry_slot,
+                                   SlotsBuffer** evacuation_slots_buffer);
+
+  // Adds the slot of a moved code object.
+  void RecordMigratedCodeObjectSlot(Address code_object,
+                                    SlotsBuffer** evacuation_slots_buffer);

 #ifdef DEBUG
   friend class MarkObjectVisitor;
@@ -755,6 +774,14 @@ class MarkCompactCollector {
   List<Page*> evacuation_candidates_;
   List<NewSpacePage*> newspace_evacuation_candidates_;

+  // The evacuation_slots_buffers_ are used by the compaction threads.
+  // When a compaction task finishes, it uses
+  // AddEvacuationSlotsbufferSynchronized to adds its slots buffer to the
+  // evacuation_slots_buffers_ list using the evacuation_slots_buffers_mutex_
+  // lock.
+  base::Mutex evacuation_slots_buffers_mutex_;
+  List<SlotsBuffer*> evacuation_slots_buffers_;
+
   base::SmartPointer<FreeList> free_list_old_space_;
   base::SmartPointer<FreeList> free_list_code_space_;
   base::SmartPointer<FreeList> free_list_map_space_;
--- a/src/heap/objects-visiting-inl.h
+++ b/src/heap/objects-visiting-inl.h
@@ -220,12 +220,11 @@ void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
     Heap* heap, RelocInfo* rinfo) {
   DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
   HeapObject* object = HeapObject::cast(rinfo->target_object());
-  Code* host = rinfo->host();
-  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, object);
+  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
   // TODO(ulan): It could be better to record slots only for strongly embedded
   // objects here and record slots for weakly embedded object during clearing
   // of non-live references in mark-compact.
-  if (!host->IsWeakObject(object)) {
+  if (!rinfo->host()->IsWeakObject(object)) {
     StaticVisitor::MarkObject(heap, object);
   }
 }
@@ -236,9 +235,8 @@ void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                     RelocInfo* rinfo) {
   DCHECK(rinfo->rmode() == RelocInfo::CELL);
   Cell* cell = rinfo->target_cell();
-  Code* host = rinfo->host();
-  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, cell);
-  if (!host->IsWeakObject(cell)) {
+  heap->mark_compact_collector()->RecordRelocSlot(rinfo, cell);
+  if (!rinfo->host()->IsWeakObject(cell)) {
     StaticVisitor::MarkObject(heap, cell);
   }
 }
@@ -250,8 +248,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
   DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence());
   Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address());
-  Code* host = rinfo->host();
-  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
+  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
   StaticVisitor::MarkObject(heap, target);
 }
@@ -271,8 +268,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                       rinfo->host()->constant_pool());
     target = Code::GetCodeFromTargetAddress(rinfo->target_address());
   }
-  Code* host = rinfo->host();
-  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
+  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
   StaticVisitor::MarkObject(heap, target);
 }
@@ -283,8 +279,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
   DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
   Code* target = rinfo->code_age_stub();
   DCHECK(target != NULL);
-  Code* host = rinfo->host();
-  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
+  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
   StaticVisitor::MarkObject(heap, target);
 }
...@@ -24,7 +24,8 @@ void RememberedSet<direction>::ClearInvalidSlots(Heap* heap) { ...@@ -24,7 +24,8 @@ void RememberedSet<direction>::ClearInvalidSlots(Heap* heap) {
if (slots != nullptr) { if (slots != nullptr) {
slots->Iterate([heap](Address addr) { slots->Iterate([heap](Address addr) {
Object** slot = reinterpret_cast<Object**>(addr); Object** slot = reinterpret_cast<Object**>(addr);
return IsValidSlot(heap, slot) ? KEEP_SLOT : REMOVE_SLOT; return IsValidSlot(heap, slot) ? SlotSet::KEEP_SLOT
: SlotSet::REMOVE_SLOT;
}); });
} }
} }
...@@ -32,24 +33,17 @@ void RememberedSet<direction>::ClearInvalidSlots(Heap* heap) { ...@@ -32,24 +33,17 @@ void RememberedSet<direction>::ClearInvalidSlots(Heap* heap) {
template <PointerDirection direction> template <PointerDirection direction>
void RememberedSet<direction>::VerifyValidSlots(Heap* heap) { void RememberedSet<direction>::VerifyValidSlots(Heap* heap) {
STATIC_ASSERT(direction == OLD_TO_NEW);
Iterate(heap, [heap](Address addr) { Iterate(heap, [heap](Address addr) {
HeapObject* obj = Object** slot = reinterpret_cast<Object**>(addr);
heap->mark_compact_collector()->FindBlackObjectBySlotSlow(addr); Object* object = *slot;
if (obj == nullptr) { if (Page::FromAddress(addr)->owner() != nullptr &&
// The slot is in dead object. Page::FromAddress(addr)->owner()->identity() == OLD_SPACE) {
MemoryChunk* chunk = MemoryChunk::FromAnyPointerAddress(heap, addr); CHECK(IsValidSlot(heap, slot));
AllocationSpace owner = chunk->owner()->identity(); heap->mark_compact_collector()->VerifyIsSlotInLiveObject(
// The old to old remembered set can have slots in dead objects. This is reinterpret_cast<Address>(slot), HeapObject::cast(object));
// OK because the set is cleared after every mark-compact GC.
// The old to new remembered set is allowed to have slots in dead
// objects only in map and large object space because these spaces cannot
// have raw untagged pointers.
CHECK(direction == OLD_TO_OLD || owner == MAP_SPACE || owner == LO_SPACE);
} else {
int offset = static_cast<int>(addr - obj->address());
CHECK(obj->IsValidSlot(offset));
} }
return KEEP_SLOT; return SlotSet::KEEP_SLOT;
}); });
} }
...@@ -70,7 +64,6 @@ bool RememberedSet<direction>::IsValidSlot(Heap* heap, Object** slot) { ...@@ -70,7 +64,6 @@ bool RememberedSet<direction>::IsValidSlot(Heap* heap, Object** slot) {
template void RememberedSet<OLD_TO_NEW>::ClearInvalidSlots(Heap* heap); template void RememberedSet<OLD_TO_NEW>::ClearInvalidSlots(Heap* heap);
template void RememberedSet<OLD_TO_NEW>::VerifyValidSlots(Heap* heap); template void RememberedSet<OLD_TO_NEW>::VerifyValidSlots(Heap* heap);
template void RememberedSet<OLD_TO_OLD>::VerifyValidSlots(Heap* heap);
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
...@@ -56,12 +56,10 @@ class RememberedSet { ...@@ -56,12 +56,10 @@ class RememberedSet {
} }
// Iterates and filters the remembered set with the given callback. // Iterates and filters the remembered set with the given callback.
// The callback should take (Address slot) and return SlotCallbackResult. // The callback should take (Address slot) and return SlotSet::CallbackResult.
template <typename Callback> template <typename Callback>
static void Iterate(Heap* heap, Callback callback) { static void Iterate(Heap* heap, Callback callback) {
MemoryChunkIterator it(heap, direction == OLD_TO_OLD PointerChunkIterator it(heap);
? MemoryChunkIterator::ALL
: MemoryChunkIterator::ALL_BUT_CODE_SPACE);
MemoryChunk* chunk; MemoryChunk* chunk;
while ((chunk = it.next()) != nullptr) { while ((chunk = it.next()) != nullptr) {
SlotSet* slots = GetSlotSet(chunk); SlotSet* slots = GetSlotSet(chunk);
...@@ -91,60 +89,6 @@ class RememberedSet { ...@@ -91,60 +89,6 @@ class RememberedSet {
}); });
} }
// Given a page and a typed slot in that page, this function adds the slot
// to the remembered set.
static void InsertTyped(Page* page, SlotType slot_type, Address slot_addr) {
STATIC_ASSERT(direction == OLD_TO_OLD);
TypedSlotSet* slot_set = page->typed_old_to_old_slots();
if (slot_set == nullptr) {
page->AllocateTypedOldToOldSlots();
slot_set = page->typed_old_to_old_slots();
}
uintptr_t offset = slot_addr - page->address();
DCHECK_LT(offset, static_cast<uintptr_t>(TypedSlotSet::kMaxOffset));
slot_set->Insert(slot_type, static_cast<uint32_t>(offset));
}
// Given a page and a range of typed slots in that page, this function removes
// the slots from the remembered set.
static void RemoveRangeTyped(Page* page, Address start, Address end) {
TypedSlotSet* slots = page->typed_old_to_old_slots();
if (slots != nullptr) {
slots->Iterate([start, end](SlotType slot_type, Address slot_addr) {
return start <= slot_addr && slot_addr < end ? REMOVE_SLOT : KEEP_SLOT;
});
}
}
// Iterates and filters typed old to old pointers with the given callback.
// The callback should take (SlotType slot_type, Address slot_addr) and
// return SlotCallbackResult.
template <typename Callback>
static void IterateTyped(Heap* heap, Callback callback) {
MemoryChunkIterator it(heap, MemoryChunkIterator::ALL_BUT_MAP_SPACE);
MemoryChunk* chunk;
while ((chunk = it.next()) != nullptr) {
TypedSlotSet* slots = chunk->typed_old_to_old_slots();
if (slots != nullptr) {
int new_count = slots->Iterate(callback);
if (new_count == 0) {
chunk->ReleaseTypedOldToOldSlots();
}
}
}
}
// Clears all old to old slots from the remembered set.
static void ClearAll(Heap* heap) {
STATIC_ASSERT(direction == OLD_TO_OLD);
MemoryChunkIterator it(heap, MemoryChunkIterator::ALL);
MemoryChunk* chunk;
while ((chunk = it.next()) != nullptr) {
chunk->ReleaseOldToOldSlots();
chunk->ReleaseTypedOldToOldSlots();
}
}
// Eliminates all stale slots from the remembered set, i.e. // Eliminates all stale slots from the remembered set, i.e.
// slots that are not part of live objects anymore. This method must be // slots that are not part of live objects anymore. This method must be
// called after marking, when the whole transitive closure is known and // called after marking, when the whole transitive closure is known and
...@@ -181,7 +125,7 @@ class RememberedSet { ...@@ -181,7 +125,7 @@ class RememberedSet {
} }
template <typename Callback> template <typename Callback>
static SlotCallbackResult Wrapper(Heap* heap, Address slot_address, static SlotSet::CallbackResult Wrapper(Heap* heap, Address slot_address,
Callback slot_callback) { Callback slot_callback) {
STATIC_ASSERT(direction == OLD_TO_NEW); STATIC_ASSERT(direction == OLD_TO_NEW);
Object** slot = reinterpret_cast<Object**>(slot_address); Object** slot = reinterpret_cast<Object**>(slot_address);
...@@ -196,97 +140,17 @@ class RememberedSet { ...@@ -196,97 +140,17 @@ class RememberedSet {
// Unfortunately, we do not know about the slot. It could be in a // Unfortunately, we do not know about the slot. It could be in a
// just freed free space object. // just freed free space object.
if (heap->InToSpace(object)) { if (heap->InToSpace(object)) {
return KEEP_SLOT; return SlotSet::KEEP_SLOT;
} }
} else { } else {
DCHECK(!heap->InNewSpace(object)); DCHECK(!heap->InNewSpace(object));
} }
return REMOVE_SLOT; return SlotSet::REMOVE_SLOT;
} }
static bool IsValidSlot(Heap* heap, Object** slot); static bool IsValidSlot(Heap* heap, Object** slot);
}; };
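To make the KEEP_SLOT/REMOVE_SLOT decision above concrete: roughly, a slot whose target ends up outside new space after a scavenge is dropped from the old-to-new set (REMOVE_SLOT), since it no longer needs tracking, while a slot whose target is still in to-space is kept (KEEP_SLOT) so the next scavenge revisits it. The to-space branch is deliberately conservative: the slot itself may sit in a just-freed free-space object, but because its target looks like a live to-space object, keeping the entry is the safe choice.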
// Buffer for keeping thread-local migration slots during compaction.
// TODO(ulan): Remove this once every thread gets local pages in compaction
// space.
class LocalSlotsBuffer BASE_EMBEDDED {
public:
LocalSlotsBuffer() : top_(new Node(nullptr)) {}
~LocalSlotsBuffer() {
Node* current = top_;
while (current != nullptr) {
Node* tmp = current->next;
delete current;
current = tmp;
}
}
void Record(Address addr) {
EnsureSpaceFor(1);
uintptr_t entry = reinterpret_cast<uintptr_t>(addr);
DCHECK_GE(entry, static_cast<uintptr_t>(NUMBER_OF_SLOT_TYPES));
Insert(entry);
}
void Record(SlotType type, Address addr) {
EnsureSpaceFor(2);
Insert(static_cast<uintptr_t>(type));
uintptr_t entry = reinterpret_cast<uintptr_t>(addr);
DCHECK_GE(entry, static_cast<uintptr_t>(NUMBER_OF_SLOT_TYPES));
Insert(entry);
}
template <typename UntypedCallback, typename TypedCallback>
void Iterate(UntypedCallback untyped_callback, TypedCallback typed_callback) {
Node* current = top_;
bool typed = false;
SlotType type;
Address addr;
while (current != nullptr) {
for (int i = 0; i < current->count; i++) {
uintptr_t entry = current->buffer[i];
if (entry < NUMBER_OF_SLOT_TYPES) {
DCHECK(!typed);
typed = true;
type = static_cast<SlotType>(entry);
} else {
addr = reinterpret_cast<Address>(entry);
if (typed) {
typed_callback(type, addr);
typed = false;
} else {
untyped_callback(addr);
}
}
}
current = current->next;
}
}
private:
void EnsureSpaceFor(int count) {
if (top_->remaining_free_slots() < count) top_ = new Node(top_);
}
void Insert(uintptr_t entry) { top_->buffer[top_->count++] = entry; }
static const int kBufferSize = 16 * KB;
struct Node : Malloced {
explicit Node(Node* next_node) : next(next_node), count(0) {}
inline int remaining_free_slots() { return kBufferSize - count; }
Node* next;
uintptr_t buffer[kBufferSize];
int count;
};
Node* top_;
};
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
......
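The LocalSlotsBuffer deleted above leans on a single invariant: a recorded value smaller than NUMBER_OF_SLOT_TYPES is a type tag for the address that follows it, which is sound because the zero page is never mapped and thus no real slot address can be that small. A minimal standalone sketch of the same tagging scheme (the std::vector backing and the names are illustrative, not the V8 implementation):

#include <cassert>
#include <cstdint>
#include <vector>

enum SlotType { EMBEDDED_OBJECT_SLOT, CODE_TARGET_SLOT, NUMBER_OF_SLOT_TYPES };

// Mixed stream of untyped addresses and (type, address) pairs.
// A value below NUMBER_OF_SLOT_TYPES is a type tag for the next entry.
class TaggedSlotStream {
 public:
  void RecordUntyped(uintptr_t addr) {
    assert(addr >= NUMBER_OF_SLOT_TYPES);  // must not collide with a tag
    entries_.push_back(addr);
  }
  void RecordTyped(SlotType type, uintptr_t addr) {
    assert(addr >= NUMBER_OF_SLOT_TYPES);
    entries_.push_back(static_cast<uintptr_t>(type));
    entries_.push_back(addr);
  }
  template <typename UntypedCallback, typename TypedCallback>
  void Iterate(UntypedCallback untyped, TypedCallback typed) {
    for (std::size_t i = 0; i < entries_.size(); i++) {
      if (entries_[i] < NUMBER_OF_SLOT_TYPES) {
        typed(static_cast<SlotType>(entries_[i]), entries_[i + 1]);
        i++;  // skip the address consumed by the typed pair
      } else {
        untyped(entries_[i]);
      }
    }
  }

 private:
  std::vector<uintptr_t> entries_;
};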
...@@ -7,13 +7,10 @@ ...@@ -7,13 +7,10 @@
#include "src/allocation.h" #include "src/allocation.h"
#include "src/base/bits.h" #include "src/base/bits.h"
#include "src/utils.h"
namespace v8 { namespace v8 {
namespace internal { namespace internal {
enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };
// Data structure for maintaining a set of slots in a standard (non-large) // Data structure for maintaining a set of slots in a standard (non-large)
// page. The base address of the page must be set with SetPageStart before any // page. The base address of the page must be set with SetPageStart before any
// operation. // operation.
...@@ -22,6 +19,8 @@ enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT }; ...@@ -22,6 +19,8 @@ enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };
// Each bucket is a bitmap with a bit corresponding to a single slot offset. // Each bucket is a bitmap with a bit corresponding to a single slot offset.
class SlotSet : public Malloced { class SlotSet : public Malloced {
public: public:
enum CallbackResult { KEEP_SLOT, REMOVE_SLOT };
SlotSet() { SlotSet() {
for (int i = 0; i < kBuckets; i++) { for (int i = 0; i < kBuckets; i++) {
bucket[i] = nullptr; bucket[i] = nullptr;
...@@ -214,124 +213,6 @@ class SlotSet : public Malloced { ...@@ -214,124 +213,6 @@ class SlotSet : public Malloced {
Address page_start_; Address page_start_;
}; };
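The bucket layout described above can be pictured with a toy version: a page's possible slot positions are split into fixed-size buckets, each bucket a lazily allocated bitmap with one bit per word-aligned slot offset. A sketch under assumed sizes (1 MB page, 1024 slots per bucket; not V8's exact constants):

#include <cstdint>
#include <cstring>

constexpr int kPageSize = 1 << 20;
constexpr int kSlotSize = sizeof(void*);    // one bit per possible slot
constexpr int kBitsPerBucket = 1 << 10;     // 1024 slots per bucket
constexpr int kBuckets = kPageSize / kSlotSize / kBitsPerBucket;

struct ToySlotSet {
  uint32_t* buckets[kBuckets] = {};  // allocated lazily, like SlotSet

  void Insert(int slot_offset) {
    int slot = slot_offset / kSlotSize;
    int bucket = slot / kBitsPerBucket;
    int bit = slot % kBitsPerBucket;
    if (buckets[bucket] == nullptr) {
      buckets[bucket] = new uint32_t[kBitsPerBucket / 32];
      std::memset(buckets[bucket], 0, kBitsPerBucket / 8);
    }
    buckets[bucket][bit / 32] |= 1u << (bit % 32);
  }

  bool Contains(int slot_offset) {
    int slot = slot_offset / kSlotSize;
    int bucket = slot / kBitsPerBucket;
    int bit = slot % kBitsPerBucket;
    return buckets[bucket] != nullptr &&
           ((buckets[bucket][bit / 32] >> (bit % 32)) & 1);
    // (deallocation omitted for brevity)
  }
};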
enum SlotType {
EMBEDDED_OBJECT_SLOT,
OBJECT_SLOT,
RELOCATED_CODE_OBJECT,
CELL_TARGET_SLOT,
CODE_TARGET_SLOT,
CODE_ENTRY_SLOT,
DEBUG_TARGET_SLOT,
NUMBER_OF_SLOT_TYPES
};
// Data structure for maintaining a multiset of typed slots in a page.
// Typed slots can only appear in Code and JSFunction objects, so
// the maximum possible offset is limited by LargePage::kMaxCodePageSize.
// The implementation is a chain of chunks, where each chunk is an array of
// encoded (slot type, slot offset) pairs.
// There is no duplicate detection and we do not expect many duplicates because
// typed slots contain V8 internal pointers that are not directly exposed to JS.
class TypedSlotSet {
public:
typedef uint32_t TypedSlot;
static const int kMaxOffset = 1 << 29;
explicit TypedSlotSet(Address page_start) : page_start_(page_start) {
chunk_ = new Chunk(nullptr, kInitialBufferSize);
}
~TypedSlotSet() {
Chunk* chunk = chunk_;
while (chunk != nullptr) {
Chunk* next = chunk->next;
delete chunk;
chunk = next;
}
}
// The slot offset specifies a slot at address page_start_ + offset.
void Insert(SlotType type, int offset) {
TypedSlot slot = ToTypedSlot(type, offset);
if (!chunk_->AddSlot(slot)) {
chunk_ = new Chunk(chunk_, NextCapacity(chunk_->capacity));
bool added = chunk_->AddSlot(slot);
DCHECK(added);
USE(added);
}
}
// Iterate over all slots in the set and for each slot invoke the callback.
// If the callback returns REMOVE_SLOT then the slot is removed from the set.
// Returns the new number of slots.
//
// Sample usage:
// Iterate([](SlotType slot_type, Address slot_address) {
// if (good(slot_type, slot_address)) return KEEP_SLOT;
// else return REMOVE_SLOT;
// });
template <typename Callback>
int Iterate(Callback callback) {
STATIC_ASSERT(NUMBER_OF_SLOT_TYPES < 8);
const TypedSlot kRemovedSlot = TypeField::encode(NUMBER_OF_SLOT_TYPES);
Chunk* chunk = chunk_;
int new_count = 0;
while (chunk != nullptr) {
TypedSlot* buffer = chunk->buffer;
int count = chunk->count;
for (int i = 0; i < count; i++) {
TypedSlot slot = buffer[i];
if (slot != kRemovedSlot) {
SlotType type = TypeField::decode(slot);
Address addr = page_start_ + OffsetField::decode(slot);
if (callback(type, addr) == KEEP_SLOT) {
new_count++;
} else {
buffer[i] = kRemovedSlot;
}
}
}
chunk = chunk->next;
}
return new_count;
}
private:
static const int kInitialBufferSize = 100;
static const int kMaxBufferSize = 16 * KB;
static int NextCapacity(int capacity) {
return Min(kMaxBufferSize, capacity * 2);
}
static TypedSlot ToTypedSlot(SlotType type, int offset) {
return TypeField::encode(type) | OffsetField::encode(offset);
}
class OffsetField : public BitField<int, 0, 29> {};
class TypeField : public BitField<SlotType, 29, 3> {};
struct Chunk : Malloced {
explicit Chunk(Chunk* next_chunk, int capacity)
: next(next_chunk), count(0), capacity(capacity) {
buffer = NewArray<TypedSlot>(capacity);
}
bool AddSlot(TypedSlot slot) {
if (count == capacity) return false;
buffer[count++] = slot;
return true;
}
~Chunk() { DeleteArray(buffer); }
Chunk* next;
int count;
int capacity;
TypedSlot* buffer;
};
Address page_start_;
Chunk* chunk_;
};
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
......
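The OffsetField/TypeField pair removed above packs a typed slot into one 32-bit word: the low 29 bits hold the page offset (hence kMaxOffset = 1 << 29) and the top 3 bits hold the SlotType (hence the STATIC_ASSERT that NUMBER_OF_SLOT_TYPES < 8). A sketch of that packing with plain shifts instead of V8's BitField template:

#include <cassert>
#include <cstdint>

enum SlotType { EMBEDDED_OBJECT_SLOT, OBJECT_SLOT, NUMBER_OF_SLOT_TYPES = 7 };

using TypedSlot = uint32_t;
constexpr int kOffsetBits = 29;  // OffsetField: bits 0..28
constexpr uint32_t kOffsetMask = (1u << kOffsetBits) - 1;

TypedSlot Encode(SlotType type, uint32_t offset) {
  assert(offset < (1u << kOffsetBits));      // offset must fit in 29 bits
  assert(static_cast<uint32_t>(type) < 8u);  // TypeField: bits 29..31
  return (static_cast<uint32_t>(type) << kOffsetBits) | offset;
}

SlotType DecodeType(TypedSlot slot) {
  return static_cast<SlotType>(slot >> kOffsetBits);
}

uint32_t DecodeOffset(TypedSlot slot) { return slot & kOffsetMask; }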
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/slots-buffer.h"
#include "src/assembler.h"
#include "src/heap/heap.h"
#include "src/objects-inl.h"
namespace v8 {
namespace internal {
bool SlotsBuffer::IsTypedSlot(ObjectSlot slot) {
return reinterpret_cast<uintptr_t>(slot) < NUMBER_OF_SLOT_TYPES;
}
bool SlotsBuffer::AddTo(SlotsBufferAllocator* allocator,
SlotsBuffer** buffer_address, SlotType type,
Address addr, AdditionMode mode) {
SlotsBuffer* buffer = *buffer_address;
if (buffer == NULL || !buffer->HasSpaceForTypedSlot()) {
if (mode == FAIL_ON_OVERFLOW && ChainLengthThresholdReached(buffer)) {
allocator->DeallocateChain(buffer_address);
return false;
}
buffer = allocator->AllocateBuffer(buffer);
*buffer_address = buffer;
}
DCHECK(buffer->HasSpaceForTypedSlot());
buffer->Add(reinterpret_cast<ObjectSlot>(type));
buffer->Add(reinterpret_cast<ObjectSlot>(addr));
return true;
}
void SlotsBuffer::RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer) {
// Remove entries by replacing them with an old-space slot containing a smi
// that is located in an unmovable page.
const ObjectSlot kRemovedEntry = HeapObject::RawField(
heap->empty_fixed_array(), FixedArrayBase::kLengthOffset);
DCHECK(Page::FromAddress(reinterpret_cast<Address>(kRemovedEntry))
->NeverEvacuate());
while (buffer != NULL) {
SlotsBuffer::ObjectSlot* slots = buffer->slots_;
intptr_t slots_count = buffer->idx_;
for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
ObjectSlot slot = slots[slot_idx];
if (!IsTypedSlot(slot)) {
Object* object = *slot;
// Slots are invalid when they currently:
// - do not point to a heap object (SMI)
// - point to a heap object in new space
// - are not within a live heap object on a valid pointer slot
// - point to a heap object not on an evacuation candidate
// TODO(mlippautz): Move InNewSpace check above IsSlotInLiveObject once
// we filter out unboxed double slots eagerly.
if (!object->IsHeapObject() ||
!heap->mark_compact_collector()->IsSlotInLiveObject(
reinterpret_cast<Address>(slot)) ||
heap->InNewSpace(object) ||
!Page::FromAddress(reinterpret_cast<Address>(object))
->IsEvacuationCandidate()) {
// TODO(hpayer): Instead of replacing slots with kRemovedEntry we
// could shrink the slots buffer in-place.
slots[slot_idx] = kRemovedEntry;
}
} else {
++slot_idx;
DCHECK(slot_idx < slots_count);
}
}
buffer = buffer->next();
}
}
void SlotsBuffer::RemoveObjectSlots(Heap* heap, SlotsBuffer* buffer,
Address start_slot, Address end_slot) {
// Remove entries by replacing them with an old-space slot containing a smi
// that is located in an unmovable page.
const ObjectSlot kRemovedEntry = HeapObject::RawField(
heap->empty_fixed_array(), FixedArrayBase::kLengthOffset);
DCHECK(Page::FromAddress(reinterpret_cast<Address>(kRemovedEntry))
->NeverEvacuate());
while (buffer != NULL) {
SlotsBuffer::ObjectSlot* slots = buffer->slots_;
intptr_t slots_count = buffer->idx_;
bool is_typed_slot = false;
for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
ObjectSlot slot = slots[slot_idx];
if (!IsTypedSlot(slot)) {
Address slot_address = reinterpret_cast<Address>(slot);
if (slot_address >= start_slot && slot_address < end_slot) {
// TODO(hpayer): Instead of replacing slots with kRemovedEntry we
// could shrink the slots buffer in-place.
slots[slot_idx] = kRemovedEntry;
if (is_typed_slot) {
slots[slot_idx - 1] = kRemovedEntry;
}
}
is_typed_slot = false;
} else {
is_typed_slot = true;
DCHECK(slot_idx < slots_count);
}
}
buffer = buffer->next();
}
}
void SlotsBuffer::VerifySlots(Heap* heap, SlotsBuffer* buffer) {
while (buffer != NULL) {
SlotsBuffer::ObjectSlot* slots = buffer->slots_;
intptr_t slots_count = buffer->idx_;
for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
ObjectSlot slot = slots[slot_idx];
if (!IsTypedSlot(slot)) {
Object* object = *slot;
if (object->IsHeapObject()) {
HeapObject* heap_object = HeapObject::cast(object);
CHECK(!heap->InNewSpace(object));
heap->mark_compact_collector()->VerifyIsSlotInLiveObject(
reinterpret_cast<Address>(slot), heap_object);
}
} else {
++slot_idx;
DCHECK(slot_idx < slots_count);
}
}
buffer = buffer->next();
}
}
SlotsBuffer* SlotsBufferAllocator::AllocateBuffer(SlotsBuffer* next_buffer) {
return new SlotsBuffer(next_buffer);
}
void SlotsBufferAllocator::DeallocateBuffer(SlotsBuffer* buffer) {
delete buffer;
}
void SlotsBufferAllocator::DeallocateChain(SlotsBuffer** buffer_address) {
SlotsBuffer* buffer = *buffer_address;
while (buffer != NULL) {
SlotsBuffer* next_buffer = buffer->next();
DeallocateBuffer(buffer);
buffer = next_buffer;
}
*buffer_address = NULL;
}
} // namespace internal
} // namespace v8
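One subtlety in RemoveObjectSlots above: a typed entry occupies two consecutive words (type tag, then address), so when the address word falls inside the removed range the preceding tag word must be invalidated as well, which is what the is_typed_slot flag tracks. A toy illustration of that pairwise removal over a plain array (the all-ones sentinel is illustrative; V8 instead points removed entries at a smi field on an unmovable page):

#include <cstdint>

// Entries below kNumberOfTags are type tags for the following address;
// real addresses can never be that small because the zero page is unmapped.
constexpr uintptr_t kNumberOfTags = 7;
constexpr uintptr_t kRemovedEntry = ~uintptr_t{0};  // sentinel, illustrative

void RemoveRange(uintptr_t* slots, int count, uintptr_t start, uintptr_t end) {
  bool prev_was_tag = false;
  for (int i = 0; i < count; i++) {
    if (slots[i] < kNumberOfTags) {
      prev_was_tag = true;  // the next word is the address of a typed pair
      continue;
    }
    if (slots[i] >= start && slots[i] < end) {
      slots[i] = kRemovedEntry;
      if (prev_was_tag) slots[i - 1] = kRemovedEntry;  // drop the whole pair
    }
    prev_was_tag = false;
  }
}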
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_SLOTS_BUFFER_H_
#define V8_HEAP_SLOTS_BUFFER_H_
#include "src/objects.h"
namespace v8 {
namespace internal {
// Forward declarations.
class SlotsBuffer;
// SlotsBufferAllocator manages the allocation and deallocation of slots buffer
// chunks and links them together. Slots buffer chunks are always created by the
// SlotsBufferAllocator.
class SlotsBufferAllocator {
public:
SlotsBuffer* AllocateBuffer(SlotsBuffer* next_buffer);
void DeallocateBuffer(SlotsBuffer* buffer);
void DeallocateChain(SlotsBuffer** buffer_address);
};
// SlotsBuffer records a sequence of slots that has to be updated
// after live objects were relocated from evacuation candidates.
// All slots are either untyped or typed:
// - Untyped slots are expected to contain a tagged object pointer.
// They are recorded by an address.
// - Typed slots are expected to contain an encoded pointer to a heap
// object where the way of encoding depends on the type of the slot.
// They are recorded as a pair (SlotType, slot address).
// We assume that the zero page is never mapped; this allows us to distinguish
// untyped slots from typed slots during iteration by a simple comparison:
// if an element of the slots buffer is less than NUMBER_OF_SLOT_TYPES, then
// it is the first element of a typed slot's pair.
class SlotsBuffer {
public:
typedef Object** ObjectSlot;
explicit SlotsBuffer(SlotsBuffer* next_buffer)
: idx_(0), chain_length_(1), next_(next_buffer) {
if (next_ != NULL) {
chain_length_ = next_->chain_length_ + 1;
}
}
~SlotsBuffer() {}
void Add(ObjectSlot slot) {
DCHECK(0 <= idx_ && idx_ < kNumberOfElements);
#ifdef DEBUG
if (slot >= reinterpret_cast<ObjectSlot>(NUMBER_OF_SLOT_TYPES)) {
DCHECK_NOT_NULL(*slot);
}
#endif
slots_[idx_++] = slot;
}
ObjectSlot Get(intptr_t i) {
DCHECK(i >= 0 && i < kNumberOfElements);
return slots_[i];
}
size_t Size() {
DCHECK(idx_ <= kNumberOfElements);
return idx_;
}
enum SlotType {
EMBEDDED_OBJECT_SLOT,
OBJECT_SLOT,
RELOCATED_CODE_OBJECT,
CELL_TARGET_SLOT,
CODE_TARGET_SLOT,
CODE_ENTRY_SLOT,
DEBUG_TARGET_SLOT,
NUMBER_OF_SLOT_TYPES
};
static const char* SlotTypeToString(SlotType type) {
switch (type) {
case EMBEDDED_OBJECT_SLOT:
return "EMBEDDED_OBJECT_SLOT";
case OBJECT_SLOT:
return "OBJECT_SLOT";
case RELOCATED_CODE_OBJECT:
return "RELOCATED_CODE_OBJECT";
case CELL_TARGET_SLOT:
return "CELL_TARGET_SLOT";
case CODE_TARGET_SLOT:
return "CODE_TARGET_SLOT";
case CODE_ENTRY_SLOT:
return "CODE_ENTRY_SLOT";
case DEBUG_TARGET_SLOT:
return "DEBUG_TARGET_SLOT";
case NUMBER_OF_SLOT_TYPES:
return "NUMBER_OF_SLOT_TYPES";
}
return "UNKNOWN SlotType";
}
SlotsBuffer* next() { return next_; }
static int SizeOfChain(SlotsBuffer* buffer) {
if (buffer == NULL) return 0;
return static_cast<int>(buffer->idx_ +
(buffer->chain_length_ - 1) * kNumberOfElements);
}
inline bool IsFull() { return idx_ == kNumberOfElements; }
inline bool HasSpaceForTypedSlot() { return idx_ < kNumberOfElements - 1; }
enum AdditionMode { FAIL_ON_OVERFLOW, IGNORE_OVERFLOW };
static bool ChainLengthThresholdReached(SlotsBuffer* buffer) {
return buffer != NULL && buffer->chain_length_ >= kChainLengthThreshold;
}
INLINE(static bool AddTo(SlotsBufferAllocator* allocator,
SlotsBuffer** buffer_address, ObjectSlot slot,
AdditionMode mode)) {
SlotsBuffer* buffer = *buffer_address;
if (buffer == NULL || buffer->IsFull()) {
if (mode == FAIL_ON_OVERFLOW && ChainLengthThresholdReached(buffer)) {
allocator->DeallocateChain(buffer_address);
return false;
}
buffer = allocator->AllocateBuffer(buffer);
*buffer_address = buffer;
}
buffer->Add(slot);
return true;
}
static bool IsTypedSlot(ObjectSlot slot);
static bool AddTo(SlotsBufferAllocator* allocator,
SlotsBuffer** buffer_address, SlotType type, Address addr,
AdditionMode mode);
// Eliminates all stale entries from the slots buffer, i.e., slots that
// are not part of live objects anymore. This method must be called after
// marking, when the whole transitive closure is known and must be called
// before sweeping when mark bits are still intact.
static void RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer);
// Eliminates all slots that are within the given address range.
static void RemoveObjectSlots(Heap* heap, SlotsBuffer* buffer,
Address start_slot, Address end_slot);
// Ensures that there are no invalid slots in the chain of slots buffers.
static void VerifySlots(Heap* heap, SlotsBuffer* buffer);
static const int kNumberOfElements = 1021;
private:
static const int kChainLengthThreshold = 15;
intptr_t idx_;
intptr_t chain_length_;
SlotsBuffer* next_;
ObjectSlot slots_[kNumberOfElements];
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_SLOTS_BUFFER_H_
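SizeOfChain above relies on the invariant that every buffer behind the head is full: a chain of length L holds idx_ entries in the head plus (L - 1) * kNumberOfElements in the rest. For example, with kNumberOfElements = 1021, a chain of three buffers whose head holds 200 entries records 200 + 2 * 1021 = 2242 slots. FAIL_ON_OVERFLOW bounds this growth: once chain_length_ reaches kChainLengthThreshold (15 buffers, roughly 15 * 1021 ≈ 15,000 slots), AddTo deallocates the whole chain and returns false, leaving the caller to handle the overflow, e.g. by giving up precise slot recording for that page.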
...@@ -147,19 +147,6 @@ HeapObject* HeapObjectIterator::FromCurrentPage() { ...@@ -147,19 +147,6 @@ HeapObject* HeapObjectIterator::FromCurrentPage() {
return NULL; return NULL;
} }
// -----------------------------------------------------------------------------
// LargePageIterator
LargePageIterator::LargePageIterator(LargeObjectSpace* space)
: next_page_(space->first_page()) {}
LargePage* LargePageIterator::next() {
LargePage* result = next_page_;
if (next_page_ != nullptr) {
next_page_ = next_page_->next_page();
}
return result;
}
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
// MemoryAllocator // MemoryAllocator
...@@ -321,15 +308,15 @@ Page* Page::FromAnyPointerAddress(Heap* heap, Address addr) { ...@@ -321,15 +308,15 @@ Page* Page::FromAnyPointerAddress(Heap* heap, Address addr) {
return static_cast<Page*>(MemoryChunk::FromAnyPointerAddress(heap, addr)); return static_cast<Page*>(MemoryChunk::FromAnyPointerAddress(heap, addr));
} }
MemoryChunkIterator::MemoryChunkIterator(Heap* heap, Mode mode)
PointerChunkIterator::PointerChunkIterator(Heap* heap)
: state_(kOldSpaceState), : state_(kOldSpaceState),
mode_(mode),
old_iterator_(heap->old_space()), old_iterator_(heap->old_space()),
code_iterator_(heap->code_space()),
map_iterator_(heap->map_space()), map_iterator_(heap->map_space()),
lo_iterator_(heap->lo_space()) {} lo_iterator_(heap->lo_space()) {}
MemoryChunk* MemoryChunkIterator::next() {
MemoryChunk* PointerChunkIterator::next() {
switch (state_) { switch (state_) {
case kOldSpaceState: { case kOldSpaceState: {
if (old_iterator_.has_next()) { if (old_iterator_.has_next()) {
...@@ -339,34 +326,33 @@ MemoryChunk* MemoryChunkIterator::next() { ...@@ -339,34 +326,33 @@ MemoryChunk* MemoryChunkIterator::next() {
// Fall through. // Fall through.
} }
case kMapState: { case kMapState: {
if (mode_ != ALL_BUT_MAP_SPACE && map_iterator_.has_next()) { if (map_iterator_.has_next()) {
return map_iterator_.next(); return map_iterator_.next();
} }
state_ = kCodeState;
// Fall through.
}
case kCodeState: {
if (mode_ != ALL_BUT_CODE_SPACE && code_iterator_.has_next()) {
return code_iterator_.next();
}
state_ = kLargeObjectState; state_ = kLargeObjectState;
// Fall through. // Fall through.
} }
case kLargeObjectState: { case kLargeObjectState: {
MemoryChunk* answer = lo_iterator_.next(); HeapObject* heap_object;
if (answer != nullptr) { do {
return answer; heap_object = lo_iterator_.Next();
} if (heap_object == NULL) {
state_ = kFinishedState; state_ = kFinishedState;
// Fall through; return NULL;
}
// Fixed arrays are the only pointer-containing objects in large
// object space.
} while (!heap_object->IsFixedArray());
MemoryChunk* answer = MemoryChunk::FromAddress(heap_object->address());
return answer;
} }
case kFinishedState: case kFinishedState:
return nullptr; return NULL;
default: default:
break; break;
} }
UNREACHABLE(); UNREACHABLE();
return nullptr; return NULL;
} }
......
...@@ -8,6 +8,7 @@ ...@@ -8,6 +8,7 @@
#include "src/base/platform/platform.h" #include "src/base/platform/platform.h"
#include "src/full-codegen/full-codegen.h" #include "src/full-codegen/full-codegen.h"
#include "src/heap/slot-set.h" #include "src/heap/slot-set.h"
#include "src/heap/slots-buffer.h"
#include "src/macro-assembler.h" #include "src/macro-assembler.h"
#include "src/msan.h" #include "src/msan.h"
#include "src/snapshot/snapshot.h" #include "src/snapshot/snapshot.h"
...@@ -477,9 +478,9 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size, ...@@ -477,9 +478,9 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
chunk->flags_ = 0; chunk->flags_ = 0;
chunk->set_owner(owner); chunk->set_owner(owner);
chunk->InitializeReservedMemory(); chunk->InitializeReservedMemory();
chunk->slots_buffer_ = nullptr;
chunk->old_to_new_slots_ = nullptr; chunk->old_to_new_slots_ = nullptr;
chunk->old_to_old_slots_ = nullptr; chunk->old_to_old_slots_ = nullptr;
chunk->typed_old_to_old_slots_ = nullptr;
chunk->skip_list_ = nullptr; chunk->skip_list_ = nullptr;
chunk->write_barrier_counter_ = kWriteBarrierCounterGranularity; chunk->write_barrier_counter_ = kWriteBarrierCounterGranularity;
chunk->progress_bar_ = 0; chunk->progress_bar_ = 0;
...@@ -731,10 +732,6 @@ LargePage* MemoryAllocator::AllocateLargePage(intptr_t object_size, ...@@ -731,10 +732,6 @@ LargePage* MemoryAllocator::AllocateLargePage(intptr_t object_size,
MemoryChunk* chunk = MemoryChunk* chunk =
AllocateChunk(object_size, object_size, executable, owner); AllocateChunk(object_size, object_size, executable, owner);
if (chunk == NULL) return NULL; if (chunk == NULL) return NULL;
if (executable && chunk->size() > LargePage::kMaxCodePageSize) {
STATIC_ASSERT(LargePage::kMaxCodePageSize <= TypedSlotSet::kMaxOffset);
FATAL("Code page is too large.");
}
return LargePage::Initialize(isolate_->heap(), chunk); return LargePage::Initialize(isolate_->heap(), chunk);
} }
...@@ -935,6 +932,8 @@ bool MemoryAllocator::CommitExecutableMemory(base::VirtualMemory* vm, ...@@ -935,6 +932,8 @@ bool MemoryAllocator::CommitExecutableMemory(base::VirtualMemory* vm,
// MemoryChunk implementation // MemoryChunk implementation
void MemoryChunk::ReleaseAllocatedMemory() { void MemoryChunk::ReleaseAllocatedMemory() {
delete slots_buffer_;
slots_buffer_ = nullptr;
delete skip_list_; delete skip_list_;
skip_list_ = nullptr; skip_list_ = nullptr;
delete mutex_; delete mutex_;
...@@ -973,15 +972,6 @@ void MemoryChunk::ReleaseOldToOldSlots() { ...@@ -973,15 +972,6 @@ void MemoryChunk::ReleaseOldToOldSlots() {
old_to_old_slots_ = nullptr; old_to_old_slots_ = nullptr;
} }
void MemoryChunk::AllocateTypedOldToOldSlots() {
DCHECK(nullptr == typed_old_to_old_slots_);
typed_old_to_old_slots_ = new TypedSlotSet(address());
}
void MemoryChunk::ReleaseTypedOldToOldSlots() {
delete typed_old_to_old_slots_;
typed_old_to_old_slots_ = nullptr;
}
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
// PagedSpace implementation // PagedSpace implementation
......
...@@ -32,7 +32,6 @@ class SemiSpace; ...@@ -32,7 +32,6 @@ class SemiSpace;
class SkipList; class SkipList;
class SlotsBuffer; class SlotsBuffer;
class SlotSet; class SlotSet;
class TypedSlotSet;
class Space; class Space;
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
...@@ -393,13 +392,13 @@ class MemoryChunk { ...@@ -393,13 +392,13 @@ class MemoryChunk {
+ kPointerSize // Heap* heap_ + kPointerSize // Heap* heap_
+ kIntSize; // int progress_bar_ + kIntSize; // int progress_bar_
static const size_t kOldToNewSlotsOffset = static const size_t kSlotsBufferOffset =
kLiveBytesOffset + kIntSize; // int live_byte_count_ kLiveBytesOffset + kIntSize; // int live_byte_count_
static const size_t kWriteBarrierCounterOffset = static const size_t kWriteBarrierCounterOffset =
kOldToNewSlotsOffset + kPointerSize // SlotSet* old_to_new_slots_; kSlotsBufferOffset + kPointerSize // SlotsBuffer* slots_buffer_;
+ kPointerSize // SlotSet* old_to_new_slots_;
+ kPointerSize // SlotSet* old_to_old_slots_; + kPointerSize // SlotSet* old_to_old_slots_;
+ kPointerSize // TypedSlotSet* typed_old_to_old_slots_;
+ kPointerSize; // SkipList* skip_list_; + kPointerSize; // SkipList* skip_list_;
static const size_t kMinHeaderSize = static const size_t kMinHeaderSize =
...@@ -510,18 +509,17 @@ class MemoryChunk { ...@@ -510,18 +509,17 @@ class MemoryChunk {
inline void set_skip_list(SkipList* skip_list) { skip_list_ = skip_list; } inline void set_skip_list(SkipList* skip_list) { skip_list_ = skip_list; }
inline SlotsBuffer* slots_buffer() { return slots_buffer_; }
inline SlotsBuffer** slots_buffer_address() { return &slots_buffer_; }
inline SlotSet* old_to_new_slots() { return old_to_new_slots_; } inline SlotSet* old_to_new_slots() { return old_to_new_slots_; }
inline SlotSet* old_to_old_slots() { return old_to_old_slots_; } inline SlotSet* old_to_old_slots() { return old_to_old_slots_; }
inline TypedSlotSet* typed_old_to_old_slots() {
return typed_old_to_old_slots_;
}
void AllocateOldToNewSlots(); void AllocateOldToNewSlots();
void ReleaseOldToNewSlots(); void ReleaseOldToNewSlots();
void AllocateOldToOldSlots(); void AllocateOldToOldSlots();
void ReleaseOldToOldSlots(); void ReleaseOldToOldSlots();
void AllocateTypedOldToOldSlots();
void ReleaseTypedOldToOldSlots();
Address area_start() { return area_start_; } Address area_start() { return area_start_; }
Address area_end() { return area_end_; } Address area_end() { return area_end_; }
...@@ -595,14 +593,12 @@ class MemoryChunk { ...@@ -595,14 +593,12 @@ class MemoryChunk {
void MarkEvacuationCandidate() { void MarkEvacuationCandidate() {
DCHECK(!IsFlagSet(NEVER_EVACUATE)); DCHECK(!IsFlagSet(NEVER_EVACUATE));
DCHECK_NULL(old_to_old_slots_); DCHECK_NULL(slots_buffer_);
DCHECK_NULL(typed_old_to_old_slots_);
SetFlag(EVACUATION_CANDIDATE); SetFlag(EVACUATION_CANDIDATE);
} }
void ClearEvacuationCandidate() { void ClearEvacuationCandidate() {
DCHECK_NULL(old_to_old_slots_); DCHECK(slots_buffer_ == NULL);
DCHECK_NULL(typed_old_to_old_slots_);
ClearFlag(EVACUATION_CANDIDATE); ClearFlag(EVACUATION_CANDIDATE);
} }
...@@ -687,12 +683,13 @@ class MemoryChunk { ...@@ -687,12 +683,13 @@ class MemoryChunk {
// Count of bytes marked black on page. // Count of bytes marked black on page.
int live_byte_count_; int live_byte_count_;
SlotsBuffer* slots_buffer_;
// A single slot set for small pages (of size kPageSize) or an array of slot // A single slot set for small pages (of size kPageSize) or an array of slot
// set for large pages. In the latter case the number of entries in the array // set for large pages. In the latter case the number of entries in the array
// is ceil(size() / kPageSize). // is ceil(size() / kPageSize).
SlotSet* old_to_new_slots_; SlotSet* old_to_new_slots_;
SlotSet* old_to_old_slots_; SlotSet* old_to_old_slots_;
TypedSlotSet* typed_old_to_old_slots_;
SkipList* skip_list_; SkipList* skip_list_;
...@@ -865,12 +862,6 @@ class LargePage : public MemoryChunk { ...@@ -865,12 +862,6 @@ class LargePage : public MemoryChunk {
inline void set_next_page(LargePage* page) { set_next_chunk(page); } inline void set_next_page(LargePage* page) { set_next_chunk(page); }
// A limit to guarantee that we do not overflow typed slot offset in
// the old to old remembered set.
// Note that this limit is higher than what assembler already imposes on
// x64 and ia32 architectures.
static const int kMaxCodePageSize = 512 * MB;
private: private:
static inline LargePage* Initialize(Heap* heap, MemoryChunk* chunk); static inline LargePage* Initialize(Heap* heap, MemoryChunk* chunk);
...@@ -986,8 +977,8 @@ class MemoryChunkValidator { ...@@ -986,8 +977,8 @@ class MemoryChunkValidator {
STATIC_ASSERT(MemoryChunk::kSizeOffset == offsetof(MemoryChunk, size_)); STATIC_ASSERT(MemoryChunk::kSizeOffset == offsetof(MemoryChunk, size_));
STATIC_ASSERT(MemoryChunk::kLiveBytesOffset == STATIC_ASSERT(MemoryChunk::kLiveBytesOffset ==
offsetof(MemoryChunk, live_byte_count_)); offsetof(MemoryChunk, live_byte_count_));
STATIC_ASSERT(MemoryChunk::kOldToNewSlotsOffset == STATIC_ASSERT(MemoryChunk::kSlotsBufferOffset ==
offsetof(MemoryChunk, old_to_new_slots_)); offsetof(MemoryChunk, slots_buffer_));
STATIC_ASSERT(MemoryChunk::kWriteBarrierCounterOffset == STATIC_ASSERT(MemoryChunk::kWriteBarrierCounterOffset ==
offsetof(MemoryChunk, write_barrier_counter_)); offsetof(MemoryChunk, write_barrier_counter_));
...@@ -2997,42 +2988,25 @@ class LargeObjectIterator : public ObjectIterator { ...@@ -2997,42 +2988,25 @@ class LargeObjectIterator : public ObjectIterator {
LargePage* current_; LargePage* current_;
}; };
class LargePageIterator BASE_EMBEDDED {
public:
explicit inline LargePageIterator(LargeObjectSpace* space);
inline LargePage* next();
private:
LargePage* next_page_;
};
// Iterates over the chunks (pages and large object pages) that can contain // Iterates over the chunks (pages and large object pages) that can contain
// pointers to new space or to evacuation candidates. // pointers to new space.
class MemoryChunkIterator BASE_EMBEDDED { class PointerChunkIterator BASE_EMBEDDED {
public: public:
enum Mode { ALL, ALL_BUT_MAP_SPACE, ALL_BUT_CODE_SPACE }; inline explicit PointerChunkIterator(Heap* heap);
inline explicit MemoryChunkIterator(Heap* heap, Mode mode);
// Return NULL when the iterator is done. // Return NULL when the iterator is done.
inline MemoryChunk* next(); inline MemoryChunk* next();
private: private:
enum State { enum State { kOldSpaceState, kMapState, kLargeObjectState, kFinishedState };
kOldSpaceState,
kMapState,
kCodeState,
kLargeObjectState,
kFinishedState
};
State state_; State state_;
const Mode mode_;
PageIterator old_iterator_; PageIterator old_iterator_;
PageIterator code_iterator_;
PageIterator map_iterator_; PageIterator map_iterator_;
LargePageIterator lo_iterator_; LargeObjectIterator lo_iterator_;
}; };
#ifdef DEBUG #ifdef DEBUG
struct CommentStatistic { struct CommentStatistic {
const char* comment; const char* comment;
......
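The restored PointerChunkIterator is a small state machine: each case drains one sub-iterator, then falls through to the next state once exhausted, ending in kFinishedState. A compact standalone sketch of the same drain-and-advance pattern (generic containers stand in for the space iterators):

#include <cstddef>
#include <vector>

// Chains several page lists into one iteration, mirroring the
// kOldSpaceState -> kMapState -> kLargeObjectState progression.
class ChainedIterator {
 public:
  explicit ChainedIterator(std::vector<std::vector<int>> spaces)
      : spaces_(std::move(spaces)) {}

  // Returns the next element, or nullptr when every space is drained.
  const int* Next() {
    while (space_ < spaces_.size()) {
      if (index_ < spaces_[space_].size()) {
        return &spaces_[space_][index_++];
      }
      ++space_;  // current space exhausted: fall through to the next one
      index_ = 0;
    }
    return nullptr;  // kFinishedState
  }

 private:
  std::vector<std::vector<int>> spaces_;
  std::size_t space_ = 0;
  std::size_t index_ = 0;
};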
...@@ -13,6 +13,22 @@ ...@@ -13,6 +13,22 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
void LocalStoreBuffer::Record(Address addr) {
if (top_->is_full()) top_ = new Node(top_);
top_->buffer[top_->count++] = addr;
}
void LocalStoreBuffer::Process(StoreBuffer* store_buffer) {
Node* current = top_;
while (current != nullptr) {
for (int i = 0; i < current->count; i++) {
Address slot = current->buffer[i];
Page* page = Page::FromAnyPointerAddress(heap_, slot);
RememberedSet<OLD_TO_NEW>::Insert(page, slot);
}
current = current->next;
}
}
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
......
...@@ -40,6 +40,41 @@ class StoreBuffer { ...@@ -40,6 +40,41 @@ class StoreBuffer {
base::VirtualMemory* virtual_memory_; base::VirtualMemory* virtual_memory_;
}; };
class LocalStoreBuffer BASE_EMBEDDED {
public:
explicit LocalStoreBuffer(Heap* heap)
: top_(new Node(nullptr)), heap_(heap) {}
~LocalStoreBuffer() {
Node* current = top_;
while (current != nullptr) {
Node* tmp = current->next;
delete current;
current = tmp;
}
}
inline void Record(Address addr);
inline void Process(StoreBuffer* store_buffer);
private:
static const int kBufferSize = 16 * KB;
struct Node : Malloced {
explicit Node(Node* next_node) : next(next_node), count(0) {}
inline bool is_full() { return count == kBufferSize; }
Node* next;
Address buffer[kBufferSize];
int count;
};
Node* top_;
Heap* heap_;
};
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
......
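The point of LocalStoreBuffer is that Record stays a cheap thread-local append during parallel evacuation, while the page lookup and the RememberedSet<OLD_TO_NEW>::Insert are deferred to Process, which runs single-threaded after the evacuation tasks join. A condensed standalone version showing that split (the callback stands in for the remembered-set insertion):

#include <cstdint>

using Address = uintptr_t;

// Thread-local chain of fixed-size nodes; cheap appends, deferred processing.
class LocalBuffer {
 public:
  LocalBuffer() : top_(new Node(nullptr)) {}
  ~LocalBuffer() {
    while (top_ != nullptr) {
      Node* next = top_->next;
      delete top_;
      top_ = next;
    }
  }
  void Record(Address addr) {
    if (top_->count == kBufferSize) top_ = new Node(top_);  // grow the chain
    top_->buffer[top_->count++] = addr;
  }
  template <typename Callback>
  void Process(Callback insert_into_remembered_set) {
    for (Node* n = top_; n != nullptr; n = n->next) {
      for (int i = 0; i < n->count; i++) {
        insert_into_remembered_set(n->buffer[i]);
      }
    }
  }

 private:
  static const int kBufferSize = 16 * 1024;  // entries per node
  struct Node {
    explicit Node(Node* next_node) : next(next_node), count(0) {}
    Node* next;
    int count;
    Address buffer[kBufferSize];
  };
  Node* top_;
};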
...@@ -168,6 +168,7 @@ ...@@ -168,6 +168,7 @@
'test-sampler-api.cc', 'test-sampler-api.cc',
'test-serialize.cc', 'test-serialize.cc',
'test-simd.cc', 'test-simd.cc',
'test-slots-buffer.cc',
'test-strings.cc', 'test-strings.cc',
'test-symbols.cc', 'test-symbols.cc',
'test-strtod.cc', 'test-strtod.cc',
......
...@@ -12,6 +12,7 @@ ...@@ -12,6 +12,7 @@
#include "src/factory.h" #include "src/factory.h"
#include "src/field-type.h" #include "src/field-type.h"
#include "src/global-handles.h" #include "src/global-handles.h"
#include "src/heap/slots-buffer.h"
#include "src/ic/ic.h" #include "src/ic/ic.h"
#include "src/macro-assembler.h" #include "src/macro-assembler.h"
#include "test/cctest/cctest.h" #include "test/cctest/cctest.h"
...@@ -1473,12 +1474,19 @@ static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map, ...@@ -1473,12 +1474,19 @@ static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj_value))); CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj_value)));
CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value)); CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
// Trigger incremental write barrier, which should add a slot to remembered // Trigger incremental write barrier, which should add a slot to |ec_page|'s
// set. // slots buffer.
{ {
int slots_buffer_len = SlotsBuffer::SizeOfChain(ec_page->slots_buffer());
FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor); FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
const int n = SlotsBuffer::kNumberOfElements + 10;
for (int i = 0; i < n; i++) {
obj->FastPropertyAtPut(index, *obj_value); obj->FastPropertyAtPut(index, *obj_value);
} }
// Ensure that the slot was actually added to the |ec_page|'s slots buffer.
CHECK_EQ(slots_buffer_len + n,
SlotsBuffer::SizeOfChain(ec_page->slots_buffer()));
}
// Migrate |obj| to |new_map| which should shift fields and put the // Migrate |obj| to |new_map| which should shift fields and put the
// |boom_value| to the slot that was earlier recorded by incremental write // |boom_value| to the slot that was earlier recorded by incremental write
......
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <limits>
#include <set>
#include "src/globals.h"
#include "src/heap/remembered-set.h"
#include "src/heap/spaces.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace v8 {
namespace internal {
TEST(LocalSlotsBuffer, InsertAndIterate) {
LocalSlotsBuffer buffer;
std::set<Address> untyped;
std::set<std::pair<SlotType, Address> > typed;
for (int k = 1000; k < 10000; k += NUMBER_OF_SLOT_TYPES) {
untyped.insert(reinterpret_cast<Address>(k));
buffer.Record(reinterpret_cast<Address>(k));
for (int i = 0; i < NUMBER_OF_SLOT_TYPES; i++) {
typed.insert(std::make_pair(static_cast<SlotType>(i),
reinterpret_cast<Address>(k + i)));
buffer.Record(static_cast<SlotType>(i), reinterpret_cast<Address>(k + i));
}
}
buffer.Iterate(
[&untyped](Address addr) {
EXPECT_NE(untyped.count(addr), 0);
untyped.erase(addr);
},
[&typed](SlotType type, Address addr) {
EXPECT_NE(typed.count(std::make_pair(type, addr)), 0);
typed.erase(std::make_pair(type, addr));
});
EXPECT_EQ(untyped.size(), 0);
EXPECT_EQ(typed.size(), 0);
}
} // namespace internal
} // namespace v8
...@@ -55,9 +55,9 @@ TEST(SlotSet, Iterate) { ...@@ -55,9 +55,9 @@ TEST(SlotSet, Iterate) {
set.Iterate([](Address slot_address) { set.Iterate([](Address slot_address) {
uintptr_t intaddr = reinterpret_cast<uintptr_t>(slot_address); uintptr_t intaddr = reinterpret_cast<uintptr_t>(slot_address);
if (intaddr % 3 == 0) { if (intaddr % 3 == 0) {
return KEEP_SLOT; return SlotSet::KEEP_SLOT;
} else { } else {
return REMOVE_SLOT; return SlotSet::REMOVE_SLOT;
} }
}); });
...@@ -139,33 +139,5 @@ TEST(SlotSet, RemoveRange) { ...@@ -139,33 +139,5 @@ TEST(SlotSet, RemoveRange) {
} }
} }
TEST(TypedSlotSet, Iterate) {
TypedSlotSet set(0);
const int kDelta = 10000001;
int added = 0;
for (uint32_t i = 0; i < TypedSlotSet::kMaxOffset; i += kDelta) {
SlotType type = static_cast<SlotType>(i % NUMBER_OF_SLOT_TYPES);
set.Insert(type, i);
++added;
}
int iterated = 0;
set.Iterate([&iterated, kDelta](SlotType type, Address addr) {
uint32_t i = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(addr));
EXPECT_EQ(i % NUMBER_OF_SLOT_TYPES, static_cast<uint32_t>(type));
EXPECT_EQ(0, i % kDelta);
++iterated;
return i % 2 == 0 ? KEEP_SLOT : REMOVE_SLOT;
});
EXPECT_EQ(added, iterated);
iterated = 0;
set.Iterate([&iterated](SlotType type, Address addr) {
uint32_t i = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(addr));
EXPECT_EQ(0, i % 2);
++iterated;
return KEEP_SLOT;
});
EXPECT_EQ(added / 2, iterated);
}
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
...@@ -111,7 +111,6 @@ ...@@ -111,7 +111,6 @@
'heap/heap-unittest.cc', 'heap/heap-unittest.cc',
'heap/scavenge-job-unittest.cc', 'heap/scavenge-job-unittest.cc',
'heap/slot-set-unittest.cc', 'heap/slot-set-unittest.cc',
'heap/remembered-set-unittest.cc',
'locked-queue-unittest.cc', 'locked-queue-unittest.cc',
'run-all-unittests.cc', 'run-all-unittests.cc',
'runtime/runtime-interpreter-unittest.cc', 'runtime/runtime-interpreter-unittest.cc',
......
...@@ -897,6 +897,8 @@ ...@@ -897,6 +897,8 @@
'../../src/heap/scavenger.cc', '../../src/heap/scavenger.cc',
'../../src/heap/scavenger.h', '../../src/heap/scavenger.h',
'../../src/heap/slot-set.h', '../../src/heap/slot-set.h',
'../../src/heap/slots-buffer.cc',
'../../src/heap/slots-buffer.h',
'../../src/heap/spaces-inl.h', '../../src/heap/spaces-inl.h',
'../../src/heap/spaces.cc', '../../src/heap/spaces.cc',
'../../src/heap/spaces.h', '../../src/heap/spaces.h',
......