Commit fddd4f06 authored by mlippautz's avatar mlippautz Committed by Commit bot

[heap] Optimize migration of objects round two

* Do not jump around all of memory to gather whether we need to profile
* Cache this information and dispatch to a templatized function

BUG=chromium:524425
LOG=N

Review URL: https://codereview.chromium.org/1820263002

Cr-Commit-Position: refs/heads/master@{#34988}
parent 1134688c
...@@ -438,31 +438,11 @@ bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) { ...@@ -438,31 +438,11 @@ bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
return false; return false;
} }
void Heap::CopyBlock(Address dst, Address src, int byte_size) { void Heap::CopyBlock(Address dst, Address src, int byte_size) {
CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src), CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src),
static_cast<size_t>(byte_size / kPointerSize)); static_cast<size_t>(byte_size / kPointerSize));
} }
// Moves a block of pointer-size-aligned memory from src to dst.
// Non-overlapping (or backward-overlapping) ranges take a fast word-wise
// copy; a dst that lies inside [src, src + byte_size) would be clobbered
// by a forward copy, so that case falls back to MemMove.
void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  DCHECK(IsAligned(byte_size, kPointerSize));
  const int num_words = byte_size / kPointerSize;
  const bool dst_inside_src = (dst >= src) && (dst < (src + byte_size));
  if (dst_inside_src) {
    // Forward word copy would read already-overwritten source words.
    MemMove(dst, src, static_cast<size_t>(byte_size));
  } else {
    Object** from = reinterpret_cast<Object**>(src);
    Object** to = reinterpret_cast<Object**>(dst);
    for (int i = 0; i < num_words; i++) {
      to[i] = from[i];
    }
  }
}
template <Heap::FindMementoMode mode> template <Heap::FindMementoMode mode>
AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) { AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
// Check if there is potentially a memento behind the object. If // Check if there is potentially a memento behind the object. If
......
...@@ -594,10 +594,6 @@ class Heap { ...@@ -594,10 +594,6 @@ class Heap {
// by pointer size. // by pointer size.
static inline void CopyBlock(Address dst, Address src, int byte_size); static inline void CopyBlock(Address dst, Address src, int byte_size);
// Optimized version of memmove for blocks with pointer size aligned sizes and
// pointer size aligned addresses.
static inline void MoveBlock(Address dst, Address src, int byte_size);
// Determines a static visitor id based on the given {map} that can then be // Determines a static visitor id based on the given {map} that can then be
// stored on the map to facilitate fast dispatch for {StaticVisitorBase}. // stored on the map to facilitate fast dispatch for {StaticVisitorBase}.
static int GetStaticVisitorIdForMap(Map* map); static int GetStaticVisitorIdForMap(Map* map);
......
...@@ -1550,9 +1550,16 @@ class MarkCompactCollector::HeapObjectVisitor { ...@@ -1550,9 +1550,16 @@ class MarkCompactCollector::HeapObjectVisitor {
class MarkCompactCollector::EvacuateVisitorBase class MarkCompactCollector::EvacuateVisitorBase
: public MarkCompactCollector::HeapObjectVisitor { : public MarkCompactCollector::HeapObjectVisitor {
public: protected:
enum MigrationMode { kFast, kProfiled };
EvacuateVisitorBase(Heap* heap, CompactionSpaceCollection* compaction_spaces) EvacuateVisitorBase(Heap* heap, CompactionSpaceCollection* compaction_spaces)
: heap_(heap), compaction_spaces_(compaction_spaces) {} : heap_(heap),
compaction_spaces_(compaction_spaces),
profiling_(
heap->isolate()->cpu_profiler()->is_profiling() ||
heap->isolate()->logger()->is_logging_code_events() ||
heap->isolate()->heap_profiler()->is_tracking_object_moves()) {}
inline bool TryEvacuateObject(PagedSpace* target_space, HeapObject* object, inline bool TryEvacuateObject(PagedSpace* target_space, HeapObject* object,
HeapObject** target_object) { HeapObject** target_object) {
...@@ -1566,6 +1573,16 @@ class MarkCompactCollector::EvacuateVisitorBase ...@@ -1566,6 +1573,16 @@ class MarkCompactCollector::EvacuateVisitorBase
return false; return false;
} }
// Dispatches to the templatized MigrateObject&lt;mode&gt;() overload based on the
// profiling_ flag cached at construction time, so per-object migration does
// not re-query the profiler/logger state (see the constructor's
// initialization of profiling_).
inline void MigrateObject(HeapObject* dst, HeapObject* src, int size,
AllocationSpace dest) {
if (profiling_) {
MigrateObject<kProfiled>(dst, src, size, dest);
} else {
MigrateObject<kFast>(dst, src, size, dest);
}
}
template <MigrationMode mode>
inline void MigrateObject(HeapObject* dst, HeapObject* src, int size, inline void MigrateObject(HeapObject* dst, HeapObject* src, int size,
AllocationSpace dest) { AllocationSpace dest) {
Address dst_addr = dst->address(); Address dst_addr = dst->address();
...@@ -1575,8 +1592,8 @@ class MarkCompactCollector::EvacuateVisitorBase ...@@ -1575,8 +1592,8 @@ class MarkCompactCollector::EvacuateVisitorBase
if (dest == OLD_SPACE) { if (dest == OLD_SPACE) {
DCHECK_OBJECT_SIZE(size); DCHECK_OBJECT_SIZE(size);
DCHECK(IsAligned(size, kPointerSize)); DCHECK(IsAligned(size, kPointerSize));
heap_->MoveBlock(dst_addr, src_addr, size); heap_->CopyBlock(dst_addr, src_addr, size);
if (FLAG_ignition && dst->IsBytecodeArray()) { if ((mode == kProfiled) && FLAG_ignition && dst->IsBytecodeArray()) {
PROFILE(heap_->isolate(), PROFILE(heap_->isolate(),
CodeMoveEvent(AbstractCode::cast(src), dst_addr)); CodeMoveEvent(AbstractCode::cast(src), dst_addr));
} }
...@@ -1584,24 +1601,28 @@ class MarkCompactCollector::EvacuateVisitorBase ...@@ -1584,24 +1601,28 @@ class MarkCompactCollector::EvacuateVisitorBase
dst->IterateBodyFast(dst->map()->instance_type(), size, &visitor); dst->IterateBodyFast(dst->map()->instance_type(), size, &visitor);
} else if (dest == CODE_SPACE) { } else if (dest == CODE_SPACE) {
DCHECK_CODEOBJECT_SIZE(size, heap_->code_space()); DCHECK_CODEOBJECT_SIZE(size, heap_->code_space());
PROFILE(heap_->isolate(), if (mode == kProfiled) {
CodeMoveEvent(AbstractCode::cast(src), dst_addr)); PROFILE(heap_->isolate(),
heap_->MoveBlock(dst_addr, src_addr, size); CodeMoveEvent(AbstractCode::cast(src), dst_addr));
}
heap_->CopyBlock(dst_addr, src_addr, size);
RememberedSet<OLD_TO_OLD>::InsertTyped(Page::FromAddress(dst_addr), RememberedSet<OLD_TO_OLD>::InsertTyped(Page::FromAddress(dst_addr),
RELOCATED_CODE_OBJECT, dst_addr); RELOCATED_CODE_OBJECT, dst_addr);
Code::cast(dst)->Relocate(dst_addr - src_addr); Code::cast(dst)->Relocate(dst_addr - src_addr);
} else { } else {
DCHECK_OBJECT_SIZE(size); DCHECK_OBJECT_SIZE(size);
DCHECK(dest == NEW_SPACE); DCHECK(dest == NEW_SPACE);
heap_->MoveBlock(dst_addr, src_addr, size); heap_->CopyBlock(dst_addr, src_addr, size);
}
if (mode == kProfiled) {
heap_->OnMoveEvent(dst, src, size);
} }
heap_->OnMoveEvent(dst, src, size);
Memory::Address_at(src_addr) = dst_addr; Memory::Address_at(src_addr) = dst_addr;
} }
protected:
Heap* heap_; Heap* heap_;
CompactionSpaceCollection* compaction_spaces_; CompactionSpaceCollection* compaction_spaces_;
bool profiling_;
}; };
class MarkCompactCollector::EvacuateNewSpaceVisitor final class MarkCompactCollector::EvacuateNewSpaceVisitor final
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment