Commit c7456abf authored by hpayer, committed by Commit bot

Change RecordSlot interface. Make it more robust by replacing anchor slot with actual object.

BUG=

Review URL: https://codereview.chromium.org/1259613006

Cr-Commit-Position: refs/heads/master@{#30007}
parent 1813f80d
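For reference, the core of the interface change, summarized from the mark-compact.h hunk below (the declarations are copied from this diff; the comments are explanatory, not part of the patch):

    // Old: callers passed an extra "anchor slot" inside the host object.
    void RecordCodeEntrySlot(Address slot, Code* target);
    INLINE(void RecordSlot(
        Object** anchor_slot, Object** slot, Object* object,
        SlotsBuffer::AdditionMode mode = SlotsBuffer::FAIL_ON_OVERFLOW));

    // New: the host HeapObject that contains the slot is passed explicitly.
    void RecordCodeEntrySlot(HeapObject* object, Address slot, Code* target);
    INLINE(void RecordSlot(
        HeapObject* object, Object** slot, Object* target,
        SlotsBuffer::AdditionMode mode = SlotsBuffer::FAIL_ON_OVERFLOW));

With the host object available, ShouldSkipEvacuationSlotRecording() can be applied to the object itself rather than being derived from an anchor slot address, which is why the Object** overload in mark-compact.h is removed below.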
...@@ -2084,15 +2084,17 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
           int end_of_region_offset;
           if (helper.IsTagged(offset, size, &end_of_region_offset)) {
             IterateAndMarkPointersToFromSpace(
-                record_slots, obj_address + offset,
-                obj_address + end_of_region_offset, &ScavengeObject);
+                target, obj_address + offset,
+                obj_address + end_of_region_offset, record_slots,
+                &ScavengeObject);
           }
           offset = end_of_region_offset;
         }
       } else {
 #endif
-      IterateAndMarkPointersToFromSpace(
-          record_slots, obj_address, obj_address + size, &ScavengeObject);
+      IterateAndMarkPointersToFromSpace(target, obj_address,
+                                        obj_address + size, record_slots,
+                                        &ScavengeObject);
 #if V8_DOUBLE_FIELDS_UNBOXING
       }
 #endif
...@@ -2418,7 +2420,7 @@ class ScavengingVisitor : public StaticVisitorBase {
         target->address() + JSFunction::kCodeEntryOffset;
     Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
     map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot(
-        code_entry_slot, code);
+        target, code_entry_slot, code);
   }
 }
...@@ -3582,8 +3584,8 @@ void Heap::AddAllocationSiteToScratchpad(AllocationSite* site,
     // candidates are not part of the global list of old space pages and
     // releasing an evacuation candidate due to a slots buffer overflow
     // results in lost pages.
-    mark_compact_collector()->RecordSlot(slot, slot, *slot,
-                                         SlotsBuffer::IGNORE_OVERFLOW);
+    mark_compact_collector()->RecordSlot(allocation_sites_scratchpad(), slot,
+                                         *slot, SlotsBuffer::IGNORE_OVERFLOW);
   }
   allocation_sites_scratchpad_length_++;
 }
...@@ -5132,33 +5134,33 @@ void Heap::ZapFromSpace() {
 }
-void Heap::IterateAndMarkPointersToFromSpace(bool record_slots, Address start,
-                                             Address end,
+void Heap::IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
+                                             Address end, bool record_slots,
                                              ObjectSlotCallback callback) {
   Address slot_address = start;
   while (slot_address < end) {
     Object** slot = reinterpret_cast<Object**>(slot_address);
-    Object* object = *slot;
+    Object* target = *slot;
     // If the store buffer becomes overfull we mark pages as being exempt from
     // the store buffer. These pages are scanned to find pointers that point
     // to the new space. In that case we may hit newly promoted objects and
     // fix the pointers before the promotion queue gets to them. Thus the 'if'.
-    if (object->IsHeapObject()) {
-      if (Heap::InFromSpace(object)) {
+    if (target->IsHeapObject()) {
+      if (Heap::InFromSpace(target)) {
         callback(reinterpret_cast<HeapObject**>(slot),
-                 HeapObject::cast(object));
-        Object* new_object = *slot;
-        if (InNewSpace(new_object)) {
-          SLOW_DCHECK(Heap::InToSpace(new_object));
-          SLOW_DCHECK(new_object->IsHeapObject());
+                 HeapObject::cast(target));
+        Object* new_target = *slot;
+        if (InNewSpace(new_target)) {
+          SLOW_DCHECK(Heap::InToSpace(new_target));
+          SLOW_DCHECK(new_target->IsHeapObject());
           store_buffer_.EnterDirectlyIntoStoreBuffer(
               reinterpret_cast<Address>(slot));
         }
-        SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(new_object));
+        SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(new_target));
       } else if (record_slots &&
-                 MarkCompactCollector::IsOnEvacuationCandidate(object)) {
-        mark_compact_collector()->RecordSlot(slot, slot, object);
+                 MarkCompactCollector::IsOnEvacuationCandidate(target)) {
+        mark_compact_collector()->RecordSlot(object, slot, target);
       }
     }
     slot_address += kPointerSize;
...
...@@ -952,9 +952,9 @@ class Heap {
   void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
   // Iterate pointers to from semispace of new space found in memory interval
-  // from start to end.
-  void IterateAndMarkPointersToFromSpace(bool record_slots, Address start,
-                                         Address end,
+  // from start to end within |object|.
+  void IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
+                                         Address end, bool record_slots,
                                          ObjectSlotCallback callback);
   // Returns whether the object resides in new space.
...
...@@ -40,8 +40,7 @@ void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot,
     MarkBit obj_bit = Marking::MarkBitFrom(obj);
     if (Marking::IsBlack(obj_bit)) {
       // Object is not going to be rescanned we need to record the slot.
-      heap_->mark_compact_collector()->RecordSlot(HeapObject::RawField(obj, 0),
-                                                  slot, value);
+      heap_->mark_compact_collector()->RecordSlot(obj, slot, value);
     }
   }
 }
...@@ -92,7 +91,7 @@ void IncrementalMarking::RecordWriteOfCodeEntrySlow(JSFunction* host,
   if (BaseRecordWrite(host, slot, value)) {
     DCHECK(slot != NULL);
     heap_->mark_compact_collector()->RecordCodeEntrySlot(
-        reinterpret_cast<Address>(slot), value);
+        host, reinterpret_cast<Address>(slot), value);
   }
 }
...@@ -177,9 +176,8 @@ class IncrementalMarkingMarkingVisitor
       int already_scanned_offset = start_offset;
       bool scan_until_end = false;
       do {
-        VisitPointersWithAnchor(heap, HeapObject::RawField(object, 0),
-                                HeapObject::RawField(object, start_offset),
-                                HeapObject::RawField(object, end_offset));
+        VisitPointers(heap, object, HeapObject::RawField(object, start_offset),
+                      HeapObject::RawField(object, end_offset));
         start_offset = end_offset;
         end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
         scan_until_end =
...@@ -214,31 +212,21 @@ class IncrementalMarkingMarkingVisitor
     VisitNativeContext(map, context);
   }
-  INLINE(static void VisitPointer(Heap* heap, Object** p)) {
-    Object* obj = *p;
-    if (obj->IsHeapObject()) {
-      heap->mark_compact_collector()->RecordSlot(p, p, obj);
-      MarkObject(heap, obj);
-    }
-  }
-  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
-    for (Object** p = start; p < end; p++) {
-      Object* obj = *p;
-      if (obj->IsHeapObject()) {
-        heap->mark_compact_collector()->RecordSlot(start, p, obj);
-        MarkObject(heap, obj);
-      }
+  INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) {
+    Object* target = *p;
+    if (target->IsHeapObject()) {
+      heap->mark_compact_collector()->RecordSlot(object, p, target);
+      MarkObject(heap, target);
     }
   }
-  INLINE(static void VisitPointersWithAnchor(Heap* heap, Object** anchor,
-                                             Object** start, Object** end)) {
+  INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
+                                   Object** start, Object** end)) {
     for (Object** p = start; p < end; p++) {
-      Object* obj = *p;
-      if (obj->IsHeapObject()) {
-        heap->mark_compact_collector()->RecordSlot(anchor, p, obj);
-        MarkObject(heap, obj);
+      Object* target = *p;
+      if (target->IsHeapObject()) {
+        heap->mark_compact_collector()->RecordSlot(object, p, target);
+        MarkObject(heap, target);
       }
     }
   }
...
...@@ -55,15 +55,15 @@ bool MarkCompactCollector::IsMarked(Object* obj) {
 }
-void MarkCompactCollector::RecordSlot(Object** anchor_slot, Object** slot,
-                                      Object* object,
+void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
+                                      Object* target,
                                       SlotsBuffer::AdditionMode mode) {
-  Page* object_page = Page::FromAddress(reinterpret_cast<Address>(object));
-  if (object_page->IsEvacuationCandidate() &&
-      !ShouldSkipEvacuationSlotRecording(anchor_slot)) {
+  Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
+  if (target_page->IsEvacuationCandidate() &&
+      !ShouldSkipEvacuationSlotRecording(object)) {
     if (!SlotsBuffer::AddTo(&slots_buffer_allocator_,
-                            object_page->slots_buffer_address(), slot, mode)) {
-      EvictPopularEvacuationCandidate(object_page);
+                            target_page->slots_buffer_address(), slot, mode)) {
+      EvictPopularEvacuationCandidate(target_page);
     }
   }
 }
...
...@@ -904,13 +904,13 @@ void CodeFlusher::ProcessJSFunctionCandidates() {
       // setter did not record the slot update and we have to do that manually.
       Address slot = candidate->address() + JSFunction::kCodeEntryOffset;
       Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot));
-      isolate_->heap()->mark_compact_collector()->RecordCodeEntrySlot(slot,
-                                                                      target);
+      isolate_->heap()->mark_compact_collector()->RecordCodeEntrySlot(
+          candidate, slot, target);
       Object** shared_code_slot =
           HeapObject::RawField(shared, SharedFunctionInfo::kCodeOffset);
       isolate_->heap()->mark_compact_collector()->RecordSlot(
-          shared_code_slot, shared_code_slot, *shared_code_slot);
+          shared, shared_code_slot, *shared_code_slot);
       candidate = next_candidate;
     }
...@@ -945,7 +945,7 @@ void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
       Object** code_slot =
           HeapObject::RawField(candidate, SharedFunctionInfo::kCodeOffset);
-      isolate_->heap()->mark_compact_collector()->RecordSlot(code_slot, code_slot,
+      isolate_->heap()->mark_compact_collector()->RecordSlot(candidate, code_slot,
                                                              *code_slot);
       candidate = next_candidate;
...@@ -995,15 +995,15 @@ void CodeFlusher::ProcessOptimizedCodeMaps() {
         Object** code_slot = code_map->RawFieldOfElementAt(
             new_length + SharedFunctionInfo::kCachedCodeOffset);
         isolate_->heap()->mark_compact_collector()->RecordSlot(
-            code_slot, code_slot, *code_slot);
+            code_map, code_slot, *code_slot);
         Object** context_slot = code_map->RawFieldOfElementAt(
             new_length + SharedFunctionInfo::kContextOffset);
         isolate_->heap()->mark_compact_collector()->RecordSlot(
-            context_slot, context_slot, *context_slot);
+            code_map, context_slot, *context_slot);
         Object** literals_slot = code_map->RawFieldOfElementAt(
             new_length + SharedFunctionInfo::kLiteralsOffset);
         isolate_->heap()->mark_compact_collector()->RecordSlot(
-            literals_slot, literals_slot, *literals_slot);
+            code_map, literals_slot, *literals_slot);
         new_length += SharedFunctionInfo::kEntryLength;
       }
...@@ -1017,7 +1017,7 @@ void CodeFlusher::ProcessOptimizedCodeMaps() {
       DCHECK(Marking::IsBlack(Marking::MarkBitFrom(shared_code)));
       Object** slot =
           code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex);
-      isolate_->heap()->mark_compact_collector()->RecordSlot(slot, slot,
+      isolate_->heap()->mark_compact_collector()->RecordSlot(code_map, slot,
                                                              *slot);
     }
   }
...@@ -1254,20 +1254,21 @@ class MarkCompactMarkingVisitor
   static void Initialize();
-  INLINE(static void VisitPointer(Heap* heap, Object** p)) {
-    MarkObjectByPointer(heap->mark_compact_collector(), p, p);
+  INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) {
+    MarkObjectByPointer(heap->mark_compact_collector(), object, p);
   }
-  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
+  INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
+                                   Object** start, Object** end)) {
     // Mark all objects pointed to in [start, end).
     const int kMinRangeForMarkingRecursion = 64;
     if (end - start >= kMinRangeForMarkingRecursion) {
-      if (VisitUnmarkedObjects(heap, start, end)) return;
+      if (VisitUnmarkedObjects(heap, object, start, end)) return;
       // We are close to a stack overflow, so just mark the objects.
     }
     MarkCompactCollector* collector = heap->mark_compact_collector();
     for (Object** p = start; p < end; p++) {
-      MarkObjectByPointer(collector, start, p);
+      MarkObjectByPointer(collector, object, p);
     }
   }
...@@ -1290,12 +1291,12 @@ class MarkCompactMarkingVisitor
   // Mark object pointed to by p.
   INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector,
-                                         Object** anchor_slot, Object** p)) {
+                                         HeapObject* object, Object** p)) {
     if (!(*p)->IsHeapObject()) return;
-    HeapObject* object = ShortCircuitConsString(p);
-    collector->RecordSlot(anchor_slot, p, object);
-    MarkBit mark = Marking::MarkBitFrom(object);
-    collector->MarkObject(object, mark);
+    HeapObject* target_object = ShortCircuitConsString(p);
+    collector->RecordSlot(object, p, target_object);
+    MarkBit mark = Marking::MarkBitFrom(target_object);
+    collector->MarkObject(target_object, mark);
   }
...@@ -1318,8 +1319,8 @@ class MarkCompactMarkingVisitor
   // Visit all unmarked objects pointed to by [start, end).
   // Returns false if the operation fails (lack of stack space).
-  INLINE(static bool VisitUnmarkedObjects(Heap* heap, Object** start,
-                                          Object** end)) {
+  INLINE(static bool VisitUnmarkedObjects(Heap* heap, HeapObject* object,
+                                          Object** start, Object** end)) {
     // Return false is we are close to the stack limit.
     StackLimitCheck check(heap->isolate());
     if (check.HasOverflowed()) return false;
...@@ -1329,7 +1330,7 @@ class MarkCompactMarkingVisitor
     for (Object** p = start; p < end; p++) {
       Object* o = *p;
       if (!o->IsHeapObject()) continue;
-      collector->RecordSlot(start, p, o);
+      collector->RecordSlot(object, p, o);
       HeapObject* obj = HeapObject::cast(o);
       MarkBit mark = Marking::MarkBitFrom(obj);
       if (Marking::IsBlackOrGrey(mark)) continue;
...@@ -1370,7 +1371,7 @@ class MarkCompactMarkingVisitor
     FixedArray* data = FixedArray::cast(re->data());
     Object** slot =
         data->data_start() + JSRegExp::saved_code_index(is_one_byte);
-    heap->mark_compact_collector()->RecordSlot(slot, slot, code);
+    heap->mark_compact_collector()->RecordSlot(data, slot, code);
     // Set a number in the 0-255 range to guarantee no smi overflow.
     re->SetDataAt(JSRegExp::code_index(is_one_byte),
...@@ -2152,7 +2153,7 @@ void MarkCompactCollector::RetainMaps() {
     if (i != new_length) {
       retained_maps->Set(new_length, cell);
       Object** slot = retained_maps->Slot(new_length);
-      RecordSlot(slot, slot, cell);
+      RecordSlot(retained_maps, slot, cell);
       retained_maps->Set(new_length + 1, Smi::FromInt(new_age));
     } else if (new_age != age) {
       retained_maps->Set(new_length + 1, Smi::FromInt(new_age));
...@@ -2422,7 +2423,7 @@ void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
       prototype_transitions->set(header + new_number_of_transitions, cell);
       Object** slot = prototype_transitions->RawFieldOfElementAt(
           header + new_number_of_transitions);
-      RecordSlot(slot, slot, cell);
+      RecordSlot(prototype_transitions, slot, cell);
     }
     new_number_of_transitions++;
   }
...@@ -2505,7 +2506,7 @@ void MarkCompactCollector::ClearMapTransitions(Map* map, Map* dead_transition) {
       Name* key = t->GetKey(i);
       t->SetKey(transition_index, key);
       Object** key_slot = t->GetKeySlot(transition_index);
-      RecordSlot(key_slot, key_slot, key);
+      RecordSlot(t, key_slot, key);
       // Target slots do not need to be recorded since maps are not compacted.
       t->SetTarget(transition_index, t->GetTarget(i));
     }
...@@ -2601,15 +2602,14 @@ void MarkCompactCollector::ProcessWeakCollections() {
     DCHECK(MarkCompactCollector::IsMarked(weak_collection));
     if (weak_collection->table()->IsHashTable()) {
       ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
-      Object** anchor = reinterpret_cast<Object**>(table->address());
       for (int i = 0; i < table->Capacity(); i++) {
         if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
           Object** key_slot =
               table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i));
-          RecordSlot(anchor, key_slot, *key_slot);
+          RecordSlot(table, key_slot, *key_slot);
           Object** value_slot =
               table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i));
-          MarkCompactMarkingVisitor::MarkObjectByPointer(this, anchor,
+          MarkCompactMarkingVisitor::MarkObjectByPointer(this, table,
                                                          value_slot);
         }
       }
...@@ -2678,9 +2678,9 @@ void MarkCompactCollector::ProcessAndClearWeakCells() {
         MarkBit mark = Marking::MarkBitFrom(value);
         SetMark(value, mark);
         Object** slot = HeapObject::RawField(value, Cell::kValueOffset);
-        RecordSlot(slot, slot, *slot);
+        RecordSlot(value, slot, *slot);
         slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
-        RecordSlot(slot, slot, *slot);
+        RecordSlot(weak_cell, slot, *slot);
       } else {
         weak_cell->clear();
       }
...@@ -2689,7 +2689,7 @@ void MarkCompactCollector::ProcessAndClearWeakCells() {
       }
     } else {
       Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
-      RecordSlot(slot, slot, *slot);
+      RecordSlot(weak_cell, slot, *slot);
     }
     weak_cell_obj = weak_cell->next();
     weak_cell->clear_next(heap());
...@@ -4712,10 +4712,11 @@ void MarkCompactCollector::EvictPopularEvacuationCandidate(Page* page) {
 }
-void MarkCompactCollector::RecordCodeEntrySlot(Address slot, Code* target) {
+void MarkCompactCollector::RecordCodeEntrySlot(HeapObject* object, Address slot,
+                                               Code* target) {
   Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
   if (target_page->IsEvacuationCandidate() &&
-      !ShouldSkipEvacuationSlotRecording(reinterpret_cast<Object**>(slot))) {
+      !ShouldSkipEvacuationSlotRecording(object)) {
     if (!SlotsBuffer::AddTo(&slots_buffer_allocator_,
                             target_page->slots_buffer_address(),
                             SlotsBuffer::CODE_ENTRY_SLOT, slot,
...
...@@ -640,11 +640,6 @@ class MarkCompactCollector {
   void VerifyOmittedMapChecks();
 #endif
-  INLINE(static bool ShouldSkipEvacuationSlotRecording(Object** anchor)) {
-    return Page::FromAddress(reinterpret_cast<Address>(anchor))
-        ->ShouldSkipEvacuationSlotRecording();
-  }
-
   INLINE(static bool ShouldSkipEvacuationSlotRecording(Object* host)) {
     return Page::FromAddress(reinterpret_cast<Address>(host))
         ->ShouldSkipEvacuationSlotRecording();
...@@ -656,11 +651,11 @@ class MarkCompactCollector {
   }
   void RecordRelocSlot(RelocInfo* rinfo, Object* target);
-  void RecordCodeEntrySlot(Address slot, Code* target);
+  void RecordCodeEntrySlot(HeapObject* object, Address slot, Code* target);
   void RecordCodeTargetPatch(Address pc, Code* target);
   INLINE(void RecordSlot(
-      Object** anchor_slot, Object** slot, Object* object,
+      HeapObject* object, Object** slot, Object* target,
       SlotsBuffer::AdditionMode mode = SlotsBuffer::FAIL_ON_OVERFLOW));
   void MigrateObject(HeapObject* dst, HeapObject* src, int size,
...
...@@ -83,7 +83,7 @@ int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
   Heap* heap = map->GetHeap();
   VisitPointers(
-      heap,
+      heap, object,
       HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
       HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
   if (!JSArrayBuffer::cast(object)->is_external()) {
...@@ -98,7 +98,7 @@ template <typename StaticVisitor>
 int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
     Map* map, HeapObject* object) {
   VisitPointers(
-      map->GetHeap(),
+      map->GetHeap(), object,
       HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
       HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
   return JSTypedArray::kSizeWithInternalFields;
...@@ -109,7 +109,7 @@ template <typename StaticVisitor>
 int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                           HeapObject* object) {
   VisitPointers(
-      map->GetHeap(),
+      map->GetHeap(), object,
       HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
       HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
   return JSDataView::kSizeWithInternalFields;
...@@ -199,9 +199,10 @@ void StaticMarkingVisitor<StaticVisitor>::Initialize() {
 template <typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
-    Heap* heap, Address entry_address) {
+    Heap* heap, HeapObject* object, Address entry_address) {
   Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
-  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
+  heap->mark_compact_collector()->RecordCodeEntrySlot(object, entry_address,
+                                                      code);
   StaticVisitor::MarkObject(heap, code);
 }
...@@ -285,7 +286,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
   for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
        ++idx) {
     Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
-    collector->RecordSlot(slot, slot, *slot);
+    collector->RecordSlot(object, slot, *slot);
   }
 }
...@@ -307,7 +308,8 @@ void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
     MarkMapContents(heap, map_object);
   } else {
     StaticVisitor::VisitPointers(
-        heap, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
+        heap, object,
+        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
         HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
   }
 }
...@@ -319,7 +321,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
   Heap* heap = map->GetHeap();
   StaticVisitor::VisitPointers(
-      heap,
+      heap, object,
       HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
       HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
 }
...@@ -347,7 +349,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
   Heap* heap = map->GetHeap();
   StaticVisitor::VisitPointers(
-      heap,
+      heap, object,
       HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
       HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
 }
...@@ -369,7 +371,8 @@ void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
   // Skip visiting the backing hash table containing the mappings and the
   // pointer to the other enqueued weak collections, both are post-processed.
   StaticVisitor::VisitPointers(
-      heap, HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
+      heap, object,
+      HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
       HeapObject::RawField(object, JSWeakCollection::kTableOffset));
   STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                 JSWeakCollection::kNextOffset);
...@@ -382,7 +385,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
   // Mark the backing hash table without pushing it on the marking stack.
   Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
   HeapObject* obj = HeapObject::cast(*slot);
-  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
+  heap->mark_compact_collector()->RecordSlot(object, slot, obj);
   StaticVisitor::MarkObjectWithoutPush(heap, obj);
 }
...@@ -494,7 +497,8 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
   int last_property_offset =
       JSRegExp::kSize + kPointerSize * map->inobject_properties();
   StaticVisitor::VisitPointers(
-      map->GetHeap(), HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
+      map->GetHeap(), object,
+      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
       HeapObject::RawField(object, last_property_offset));
 }
...@@ -505,7 +509,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
   Heap* heap = map->GetHeap();
   StaticVisitor::VisitPointers(
-      heap,
+      heap, object,
       HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
       HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
   if (!JSArrayBuffer::cast(object)->is_external()) {
...@@ -519,7 +523,7 @@ template <typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
     Map* map, HeapObject* object) {
   StaticVisitor::VisitPointers(
-      map->GetHeap(),
+      map->GetHeap(), object,
       HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
       HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
 }
...@@ -529,7 +533,7 @@ template <typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                           HeapObject* object) {
   StaticVisitor::VisitPointers(
-      map->GetHeap(),
+      map->GetHeap(), object,
       HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
       HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
 }
...@@ -553,13 +557,14 @@ void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
   DescriptorArray* descriptors = map->instance_descriptors();
   if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
       descriptors->length() > 0) {
-    StaticVisitor::VisitPointers(heap, descriptors->GetFirstElementAddress(),
+    StaticVisitor::VisitPointers(heap, descriptors,
+                                 descriptors->GetFirstElementAddress(),
                                  descriptors->GetDescriptorEndSlot(0));
   }
   int start = 0;
   int end = map->NumberOfOwnDescriptors();
   if (start < end) {
-    StaticVisitor::VisitPointers(heap,
+    StaticVisitor::VisitPointers(heap, descriptors,
                                  descriptors->GetDescriptorStartSlot(start),
                                  descriptors->GetDescriptorEndSlot(end));
   }
...@@ -569,7 +574,7 @@ void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
   // been marked already, it is fine that one of these fields contains a
   // pointer to it.
   StaticVisitor::VisitPointers(
-      heap, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
+      heap, map, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
       HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
 }
...@@ -580,13 +585,13 @@ void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
   if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;
   if (transitions->HasPrototypeTransitions()) {
-    StaticVisitor::VisitPointer(heap,
+    StaticVisitor::VisitPointer(heap, transitions,
                                 transitions->GetPrototypeTransitionsSlot());
   }
   int num_transitions = TransitionArray::NumberOfTransitions(transitions);
   for (int i = 0; i < num_transitions; ++i) {
-    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
+    StaticVisitor::VisitPointer(heap, transitions, transitions->GetKeySlot(i));
   }
 }
...@@ -602,7 +607,7 @@ void StaticMarkingVisitor<StaticVisitor>::MarkOptimizedCodeMap(
   if (FLAG_turbo_preserve_shared_code && shared_object->IsCode() &&
       FLAG_age_code && !Code::cast(shared_object)->IsOld()) {
     StaticVisitor::VisitPointer(
-        heap,
+        heap, code_map,
         code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex));
   }
 }
...@@ -734,7 +739,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
       object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
   Object** end_slot = HeapObject::RawField(
       object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
-  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
 }
...@@ -743,7 +748,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
     Heap* heap, HeapObject* object) {
   Object** name_slot =
       HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
-  StaticVisitor::VisitPointer(heap, name_slot);
+  StaticVisitor::VisitPointer(heap, object, name_slot);
   // Skip visiting kCodeOffset as it is treated weakly here.
   STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
...@@ -755,7 +760,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
       HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
   Object** end_slot = HeapObject::RawField(
       object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
-  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
 }
...@@ -766,16 +771,17 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
       HeapObject::RawField(object, JSFunction::kPropertiesOffset);
   Object** end_slot =
       HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
-  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
-  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
+  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
+  VisitCodeEntry(heap, object,
+                 object->address() + JSFunction::kCodeEntryOffset);
   STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                 JSFunction::kPrototypeOrInitialMapOffset);
   start_slot =
       HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
   end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
-  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
 }
...@@ -786,7 +792,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
       HeapObject::RawField(object, JSFunction::kPropertiesOffset);
   Object** end_slot =
       HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
-  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
   // Skip visiting kCodeEntryOffset as it is treated weakly here.
   STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
...@@ -795,7 +801,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
   start_slot =
       HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
   end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
-  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
+  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
 }
...@@ -839,15 +845,16 @@ void Code::CodeIterateBody(Heap* heap) {
   // There are two places where we iterate code bodies: here and the non-
   // templated CodeIterateBody (above). They should be kept in sync.
   StaticVisitor::VisitPointer(
-      heap,
+      heap, this,
       reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
   StaticVisitor::VisitPointer(
-      heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
+      heap, this,
+      reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
   StaticVisitor::VisitPointer(
-      heap,
+      heap, this,
       reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
   StaticVisitor::VisitPointer(
-      heap,
+      heap, this,
       reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
   StaticVisitor::VisitNextCodeLink(
       heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
...
...@@ -216,7 +216,7 @@ Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
       if (record_slots) {
         Object** next_slot =
             HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
-        collector->RecordSlot(next_slot, next_slot, retained);
+        collector->RecordSlot(tail, next_slot, retained);
       }
     }
     // Retained object is new tail.
...@@ -327,8 +327,7 @@ struct WeakListVisitor<Context> {
       // Record the updated slot if necessary.
       Object** head_slot =
           HeapObject::RawField(context, FixedArray::SizeFor(index));
-      heap->mark_compact_collector()->RecordSlot(head_slot, head_slot,
-                                                 list_head);
+      heap->mark_compact_collector()->RecordSlot(context, head_slot, list_head);
     }
   }
...
...@@ -215,7 +215,7 @@ class BodyVisitorBase : public AllStatic {
  private:
   INLINE(static void IterateRawPointers(Heap* heap, HeapObject* object,
                                         int start_offset, int end_offset)) {
-    StaticVisitor::VisitPointers(heap,
+    StaticVisitor::VisitPointers(heap, object,
                                  HeapObject::RawField(object, start_offset),
                                  HeapObject::RawField(object, end_offset));
   }
...@@ -297,22 +297,23 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
     return table_.GetVisitor(map)(map, obj);
   }
-  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
+  INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
+                                   Object** start, Object** end)) {
     for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
   }
  private:
   INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
     Heap* heap = map->GetHeap();
-    VisitPointers(heap,
+    VisitPointers(heap, object,
                   HeapObject::RawField(object, JSFunction::kPropertiesOffset),
                   HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
     // Don't visit code entry. We are using this visitor only during scavenges.
     VisitPointers(
-        heap, HeapObject::RawField(object,
-                                   JSFunction::kCodeEntryOffset + kPointerSize),
+        heap, object, HeapObject::RawField(
+                          object, JSFunction::kCodeEntryOffset + kPointerSize),
         HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset));
     return JSFunction::kSize;
   }
...@@ -410,7 +411,8 @@ class StaticMarkingVisitor : public StaticVisitorBase {
   INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
   INLINE(static void VisitWeakCell(Map* map, HeapObject* object));
-  INLINE(static void VisitCodeEntry(Heap* heap, Address entry_address));
+  INLINE(static void VisitCodeEntry(Heap* heap, HeapObject* object,
+                                    Address entry_address));
   INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
   INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
   INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
...
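Call sites follow one uniform migration pattern, illustrated here with a before/after pair adapted from the CodeFlusher hunk above (receiver chain shortened for readability):

    // Before: the slot doubled as its own anchor.
    RecordSlot(code_slot, code_slot, *code_slot);
    // After: the HeapObject that contains the slot is named explicitly.
    RecordSlot(candidate, code_slot, *code_slot);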