Commit fe3d409d authored by mlippautz, committed by Commit bot

Reland "[heap] Unify evacuating an object for new and old generation."

R=hpayer@chromium.org
BUG=chromium:524425
LOG=N

Review URL: https://codereview.chromium.org/1504773002

Cr-Commit-Position: refs/heads/master@{#32712}
parent 6a132845
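For context, this CL replaces the separate new-space promotion path (TryPromoteObject) and old-space evacuation path with a shared EvacuateVisitorBase::TryEvacuateObject that allocates space for the object in a target space and migrates it. Below is a minimal, self-contained C++ sketch of that shape. The class and method names EvacuateVisitorBase, TryEvacuateObject, EvacuateNewSpaceVisitor, and EvacuateOldSpaceVisitor mirror the CL; Object, Space, SlotsBuffer, and AllocateRaw here are simplified stand-ins for illustration, not the actual V8 types.

// Minimal sketch of the unified evacuation path (stand-in types, not V8's).
#include <cstdio>

struct Object {
  int size = 16;
};

struct SlotsBuffer {};  // placeholder for the recorded-slots buffer

// A "space" that can hand out room for one object, or refuse.
struct Space {
  const char* name;
  bool can_allocate;
  Object slot;  // single-object backing store, enough for the sketch
  Object* AllocateRaw(int /*size*/) { return can_allocate ? &slot : nullptr; }
};

// Shared base: holds the slots buffer and the single evacuation path.
class EvacuateVisitorBase {
 public:
  explicit EvacuateVisitorBase(SlotsBuffer** slots) : slots_(slots) {}

  // Allocate in |target| and "migrate" the object; false if allocation fails.
  bool TryEvacuateObject(Space* target, Object* object, Object** target_object) {
    Object* copy = target->AllocateRaw(object->size);
    if (copy == nullptr) return false;
    *copy = *object;  // stand-in for MigrateObject plus slot recording
    *target_object = copy;
    std::printf("evacuated %d bytes into %s\n", object->size, target->name);
    return true;
  }

 protected:
  SlotsBuffer** slots_;
};

// New-space visitor: tries to promote into old space.
class EvacuateNewSpaceVisitor : public EvacuateVisitorBase {
 public:
  EvacuateNewSpaceVisitor(Space* old_space, SlotsBuffer** slots)
      : EvacuateVisitorBase(slots), old_space_(old_space) {}

  bool Visit(Object* object) {
    Object* target = nullptr;
    return TryEvacuateObject(old_space_, object, &target);
  }

 private:
  Space* old_space_;
};

// Old-space visitor: evacuates into a compaction space.
class EvacuateOldSpaceVisitor : public EvacuateVisitorBase {
 public:
  EvacuateOldSpaceVisitor(Space* compaction_space, SlotsBuffer** slots)
      : EvacuateVisitorBase(slots), compaction_space_(compaction_space) {}

  bool Visit(Object* object) {
    Object* target = nullptr;
    return TryEvacuateObject(compaction_space_, object, &target);
  }

 private:
  Space* compaction_space_;
};

int main() {
  SlotsBuffer* buffer = nullptr;
  Space old_space{"old space", true};
  Space compaction_space{"compaction space", true};
  Object o;
  EvacuateNewSpaceVisitor new_space_visitor(&old_space, &buffer);
  EvacuateOldSpaceVisitor old_space_visitor(&compaction_space, &buffer);
  new_space_visitor.Visit(&o);  // promotion path
  old_space_visitor.Visit(&o);  // compaction path
  return 0;
}

As the sketch suggests, after the refactoring the concrete visitors only decide which space to evacuate into, while the allocation-plus-migration step lives in one place.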
@@ -1535,18 +1535,51 @@ class MarkCompactCollector::HeapObjectVisitor {
 };
 
-class MarkCompactCollector::EvacuateNewSpaceVisitor
+class MarkCompactCollector::EvacuateVisitorBase
     : public MarkCompactCollector::HeapObjectVisitor {
  public:
-  explicit EvacuateNewSpaceVisitor(Heap* heap) : heap_(heap) {}
+  EvacuateVisitorBase(Heap* heap, SlotsBuffer** evacuation_slots_buffer)
+      : heap_(heap), evacuation_slots_buffer_(evacuation_slots_buffer) {}
 
-  virtual bool Visit(HeapObject* object) {
-    Heap::UpdateAllocationSiteFeedback(object, Heap::RECORD_SCRATCHPAD_SLOT);
+  bool TryEvacuateObject(PagedSpace* target_space, HeapObject* object,
+                         HeapObject** target_object) {
     int size = object->Size();
+    AllocationAlignment alignment = object->RequiredAlignment();
+    AllocationResult allocation = target_space->AllocateRaw(size, alignment);
+    if (allocation.To(target_object)) {
+      heap_->mark_compact_collector()->MigrateObject(
+          *target_object, object, size, target_space->identity(),
+          evacuation_slots_buffer_);
+      return true;
+    }
+    return false;
+  }
 
-    // TODO(hpayer): Refactor EvacuateObject and call this function instead.
+ protected:
+  Heap* heap_;
+  SlotsBuffer** evacuation_slots_buffer_;
+};
+
+
+class MarkCompactCollector::EvacuateNewSpaceVisitor
+    : public MarkCompactCollector::EvacuateVisitorBase {
+ public:
+  explicit EvacuateNewSpaceVisitor(Heap* heap,
+                                   SlotsBuffer** evacuation_slots_buffer)
+      : EvacuateVisitorBase(heap, evacuation_slots_buffer) {}
+
+  bool Visit(HeapObject* object) override {
+    Heap::UpdateAllocationSiteFeedback(object, Heap::RECORD_SCRATCHPAD_SLOT);
+    int size = object->Size();
+    HeapObject* target_object = nullptr;
     if (heap_->ShouldBePromoted(object->address(), size) &&
-        heap_->mark_compact_collector()->TryPromoteObject(object, size)) {
+        TryEvacuateObject(heap_->old_space(), object, &target_object)) {
+      // If we end up needing more special cases, we should factor this out.
+      if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
+        heap_->array_buffer_tracker()->Promote(
+            JSArrayBuffer::cast(target_object));
+      }
+      heap_->IncrementPromotedObjectsSize(size);
       return true;
     }
@@ -1573,77 +1606,34 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor
     heap_->IncrementSemiSpaceCopiedObjectSize(size);
     return true;
   }
-
- private:
-  Heap* heap_;
 };
 
 
 class MarkCompactCollector::EvacuateOldSpaceVisitor
-    : public MarkCompactCollector::HeapObjectVisitor {
+    : public MarkCompactCollector::EvacuateVisitorBase {
  public:
   EvacuateOldSpaceVisitor(Heap* heap,
                           CompactionSpaceCollection* compaction_spaces,
                           SlotsBuffer** evacuation_slots_buffer)
-      : heap_(heap),
-        compaction_spaces_(compaction_spaces),
-        evacuation_slots_buffer_(evacuation_slots_buffer) {}
+      : EvacuateVisitorBase(heap, evacuation_slots_buffer),
+        compaction_spaces_(compaction_spaces) {}
 
-  virtual bool Visit(HeapObject* object) {
-    int size = object->Size();
-    AllocationAlignment alignment = object->RequiredAlignment();
+  bool Visit(HeapObject* object) override {
+    CompactionSpace* target_space = compaction_spaces_->Get(
+        Page::FromAddress(object->address())->owner()->identity());
     HeapObject* target_object = nullptr;
-    AllocationSpace id =
-        Page::FromAddress(object->address())->owner()->identity();
-    AllocationResult allocation =
-        compaction_spaces_->Get(id)->AllocateRaw(size, alignment);
-    if (!allocation.To(&target_object)) {
-      return false;
+    if (TryEvacuateObject(target_space, object, &target_object)) {
+      DCHECK(object->map_word().IsForwardingAddress());
+      return true;
     }
-    heap_->mark_compact_collector()->MigrateObject(
-        target_object, object, size, id, evacuation_slots_buffer_);
-    DCHECK(object->map_word().IsForwardingAddress());
-    return true;
+    return false;
   }
 
  private:
-  Heap* heap_;
   CompactionSpaceCollection* compaction_spaces_;
-  SlotsBuffer** evacuation_slots_buffer_;
 };
 
 
-bool MarkCompactCollector::IterateLiveObjectsOnPage(MemoryChunk* page,
-                                                    HeapObjectVisitor* visitor,
-                                                    IterationMode mode) {
-  Address offsets[16];
-  for (MarkBitCellIterator it(page); !it.Done(); it.Advance()) {
-    Address cell_base = it.CurrentCellBase();
-    MarkBit::CellType* cell = it.CurrentCell();
-    if (*cell == 0) continue;
-
-    int live_objects = MarkWordToObjectStarts(*cell, cell_base, offsets);
-    for (int i = 0; i < live_objects; i++) {
-      HeapObject* object = HeapObject::FromAddress(offsets[i]);
-      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));
-      if (!visitor->Visit(object)) {
-        if ((mode == kClearMarkbits) && (i > 0)) {
-          page->markbits()->ClearRange(
-              page->AddressToMarkbitIndex(page->area_start()),
-              page->AddressToMarkbitIndex(offsets[i]));
-        }
-        return false;
-      }
-    }
-    if (mode == kClearMarkbits) {
-      *cell = 0;
-    }
-  }
-  return true;
-}
-
-
 void MarkCompactCollector::DiscoverGreyObjectsInSpace(PagedSpace* space) {
   PageIterator it(space);
   while (it.has_next()) {
@@ -2908,28 +2898,6 @@ static String* UpdateReferenceInExternalStringTableEntry(Heap* heap,
 }
 
 
-bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
-                                            int object_size) {
-  OldSpace* old_space = heap()->old_space();
-  HeapObject* target = nullptr;
-  AllocationAlignment alignment = object->RequiredAlignment();
-  AllocationResult allocation = old_space->AllocateRaw(object_size, alignment);
-  if (allocation.To(&target)) {
-    MigrateObject(target, object, object_size, old_space->identity(),
-                  &migration_slots_buffer_);
-    // If we end up needing more special cases, we should factor this out.
-    if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
-      heap()->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
-    }
-    heap()->IncrementPromotedObjectsSize(object_size);
-    return true;
-  }
-  return false;
-}
-
-
 bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot,
                                                HeapObject** out_object) {
   Space* owner = p->owner();
@@ -3102,11 +3070,11 @@ void MarkCompactCollector::EvacuateNewSpace() {
   // new entries in the store buffer and may cause some pages to be marked
   // scan-on-scavenge.
   NewSpacePageIterator it(from_bottom, from_top);
-  EvacuateNewSpaceVisitor new_space_visitor(heap());
+  EvacuateNewSpaceVisitor new_space_visitor(heap(), &migration_slots_buffer_);
   while (it.has_next()) {
     NewSpacePage* p = it.next();
     survivors_size += p->LiveBytes();
-    bool ok = IterateLiveObjectsOnPage(p, &new_space_visitor, kClearMarkbits);
+    bool ok = VisitLiveObjects(p, &new_space_visitor, kClearMarkbits);
     USE(ok);
     DCHECK(ok);
   }
@@ -3308,7 +3276,7 @@ void MarkCompactCollector::EvacuatePages(
       double start = heap()->MonotonicallyIncreasingTimeInMs();
       intptr_t live_bytes = p->LiveBytes();
       AlwaysAllocateScope always_allocate(isolate());
-      if (IterateLiveObjectsOnPage(p, &visitor, kClearMarkbits)) {
+      if (VisitLiveObjects(p, &visitor, kClearMarkbits)) {
         p->ResetLiveBytes();
         p->parallel_compaction_state().SetValue(
             MemoryChunk::kCompactingFinalize);
@@ -3495,9 +3463,37 @@ void MarkCompactCollector::RemoveObjectSlots(Address start_slot,
 }
 
 
-void MarkCompactCollector::VisitLiveObjects(Page* page,
-                                            ObjectVisitor* visitor) {
-  // First pass on aborted pages.
+bool MarkCompactCollector::VisitLiveObjects(MemoryChunk* page,
+                                            HeapObjectVisitor* visitor,
+                                            IterationMode mode) {
+  Address offsets[16];
+  for (MarkBitCellIterator it(page); !it.Done(); it.Advance()) {
+    Address cell_base = it.CurrentCellBase();
+    MarkBit::CellType* cell = it.CurrentCell();
+    if (*cell == 0) continue;
+
+    int live_objects = MarkWordToObjectStarts(*cell, cell_base, offsets);
+    for (int i = 0; i < live_objects; i++) {
+      HeapObject* object = HeapObject::FromAddress(offsets[i]);
+      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));
+      if (!visitor->Visit(object)) {
+        if ((mode == kClearMarkbits) && (i > 0)) {
+          page->markbits()->ClearRange(
+              page->AddressToMarkbitIndex(page->area_start()),
+              page->AddressToMarkbitIndex(offsets[i]));
+        }
+        return false;
+      }
+    }
+    if (mode == kClearMarkbits) {
+      *cell = 0;
+    }
+  }
+  return true;
+}
+
+
+void MarkCompactCollector::VisitLiveObjectsBody(Page* page,
+                                                ObjectVisitor* visitor) {
   Address starts[16];
   for (MarkBitCellIterator it(page); !it.Done(); it.Advance()) {
     Address cell_base = it.CurrentCellBase();
@@ -3642,7 +3638,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
     // First pass on aborted pages, fixing up all live objects.
     if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) {
       p->ClearEvacuationCandidate();
-      VisitLiveObjects(p, &updating_visitor);
+      VisitLiveObjectsBody(p, &updating_visitor);
     }
   }
@@ -319,10 +319,6 @@ class MarkCompactCollector {
     kClearMarkbits,
   };
 
-  class EvacuateNewSpaceVisitor;
-  class EvacuateOldSpaceVisitor;
-  class HeapObjectVisitor;
-
   static void Initialize();
 
   void SetUp();
@@ -409,8 +405,6 @@ class MarkCompactCollector {
                      AllocationSpace to_old_space,
                      SlotsBuffer** evacuation_slots_buffer);
 
-  bool TryPromoteObject(HeapObject* object, int object_size);
-
   void InvalidateCode(Code* code);
 
   void ClearMarkbits();
@@ -508,6 +502,10 @@ class MarkCompactCollector {
  private:
   class CompactionTask;
+  class EvacuateNewSpaceVisitor;
+  class EvacuateOldSpaceVisitor;
+  class EvacuateVisitorBase;
+  class HeapObjectVisitor;
   class SweeperTask;
 
   explicit MarkCompactCollector(Heap* heap);
@@ -701,10 +699,6 @@ class MarkCompactCollector {
   // regions to each space's free list.
   void SweepSpaces();
 
-  // Iterates through all live objects on a page using marking information.
-  // Returns whether all objects have successfully been visited.
-  bool IterateLiveObjectsOnPage(MemoryChunk* page, HeapObjectVisitor* visitor,
-                                IterationMode mode);
-
   void EvacuateNewSpace();
@@ -726,7 +720,12 @@ class MarkCompactCollector {
 
   void EvacuateNewSpaceAndCandidates();
 
-  void VisitLiveObjects(Page* page, ObjectVisitor* visitor);
+  // Iterates through all live objects on a page using marking information.
+  // Returns whether all objects have successfully been visited.
+  bool VisitLiveObjects(MemoryChunk* page, HeapObjectVisitor* visitor,
+                        IterationMode mode);
+
+  void VisitLiveObjectsBody(Page* page, ObjectVisitor* visitor);
 
   void SweepAbortedPages();
@@ -496,7 +496,8 @@ void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) {
           // only happen later). Note that we can never reach an
           // aborted page through the scavenger.
           DCHECK_EQ(heap_->gc_state(), Heap::MARK_COMPACT);
-          heap_->mark_compact_collector()->VisitLiveObjects(page, &visitor);
+          heap_->mark_compact_collector()->VisitLiveObjectsBody(page,
+                                                                &visitor);
         } else {
           heap_->mark_compact_collector()
               ->SweepOrWaitUntilSweepingCompleted(page);