Commit c7456abf authored by hpayer, committed by Commit bot

Change the RecordSlot interface: make it more robust by replacing the anchor slot with the actual holding object.

BUG=

Review URL: https://codereview.chromium.org/1259613006

Cr-Commit-Position: refs/heads/master@{#30007}
parent 1813f80d
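The interface change at the heart of this commit is easiest to see in the MarkCompactCollector declaration (mark-compact.h hunk further down): the anchor-slot parameter is replaced by the holding HeapObject. A minimal before/after sketch, with the declarations as they appear in this diff:

  // Before: callers passed an anchor slot inside the holding object.
  INLINE(void RecordSlot(
      Object** anchor_slot, Object** slot, Object* object,
      SlotsBuffer::AdditionMode mode = SlotsBuffer::FAIL_ON_OVERFLOW));

  // After: callers pass the holding object itself; the evacuation-candidate
  // check is done on the target's page and the skip-recording check on the
  // holding object.
  INLINE(void RecordSlot(
      HeapObject* object, Object** slot, Object* target,
      SlotsBuffer::AdditionMode mode = SlotsBuffer::FAIL_ON_OVERFLOW));

Call sites change accordingly, e.g. RecordSlot(slot, slot, *slot, ...) becomes RecordSlot(allocation_sites_scratchpad(), slot, *slot, ...), as the hunks below show.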
@@ -2084,15 +2084,17 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
           int end_of_region_offset;
           if (helper.IsTagged(offset, size, &end_of_region_offset)) {
             IterateAndMarkPointersToFromSpace(
-                record_slots, obj_address + offset,
-                obj_address + end_of_region_offset, &ScavengeObject);
+                target, obj_address + offset,
+                obj_address + end_of_region_offset, record_slots,
+                &ScavengeObject);
           }
           offset = end_of_region_offset;
         }
       } else {
 #endif
-      IterateAndMarkPointersToFromSpace(
-          record_slots, obj_address, obj_address + size, &ScavengeObject);
+      IterateAndMarkPointersToFromSpace(target, obj_address,
+                                        obj_address + size, record_slots,
+                                        &ScavengeObject);
 #if V8_DOUBLE_FIELDS_UNBOXING
     }
 #endif
@@ -2418,7 +2420,7 @@ class ScavengingVisitor : public StaticVisitorBase {
           target->address() + JSFunction::kCodeEntryOffset;
       Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
       map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot(
-          code_entry_slot, code);
+          target, code_entry_slot, code);
     }
   }
@@ -3582,8 +3584,8 @@ void Heap::AddAllocationSiteToScratchpad(AllocationSite* site,
     // candidates are not part of the global list of old space pages and
     // releasing an evacuation candidate due to a slots buffer overflow
     // results in lost pages.
-    mark_compact_collector()->RecordSlot(slot, slot, *slot,
-                                         SlotsBuffer::IGNORE_OVERFLOW);
+    mark_compact_collector()->RecordSlot(allocation_sites_scratchpad(), slot,
+                                         *slot, SlotsBuffer::IGNORE_OVERFLOW);
   }
   allocation_sites_scratchpad_length_++;
 }
@@ -5132,33 +5134,33 @@ void Heap::ZapFromSpace() {
 }
 
-void Heap::IterateAndMarkPointersToFromSpace(bool record_slots, Address start,
-                                             Address end,
+void Heap::IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
+                                             Address end, bool record_slots,
                                              ObjectSlotCallback callback) {
   Address slot_address = start;
 
   while (slot_address < end) {
     Object** slot = reinterpret_cast<Object**>(slot_address);
-    Object* object = *slot;
+    Object* target = *slot;
     // If the store buffer becomes overfull we mark pages as being exempt from
     // the store buffer. These pages are scanned to find pointers that point
     // to the new space. In that case we may hit newly promoted objects and
     // fix the pointers before the promotion queue gets to them. Thus the 'if'.
-    if (object->IsHeapObject()) {
-      if (Heap::InFromSpace(object)) {
+    if (target->IsHeapObject()) {
+      if (Heap::InFromSpace(target)) {
         callback(reinterpret_cast<HeapObject**>(slot),
-                 HeapObject::cast(object));
-        Object* new_object = *slot;
-        if (InNewSpace(new_object)) {
-          SLOW_DCHECK(Heap::InToSpace(new_object));
-          SLOW_DCHECK(new_object->IsHeapObject());
+                 HeapObject::cast(target));
+        Object* new_target = *slot;
+        if (InNewSpace(new_target)) {
+          SLOW_DCHECK(Heap::InToSpace(new_target));
+          SLOW_DCHECK(new_target->IsHeapObject());
           store_buffer_.EnterDirectlyIntoStoreBuffer(
               reinterpret_cast<Address>(slot));
         }
-        SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(new_object));
+        SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(new_target));
       } else if (record_slots &&
-                 MarkCompactCollector::IsOnEvacuationCandidate(object)) {
-        mark_compact_collector()->RecordSlot(slot, slot, object);
+                 MarkCompactCollector::IsOnEvacuationCandidate(target)) {
+        mark_compact_collector()->RecordSlot(object, slot, target);
       }
     }
     slot_address += kPointerSize;
@@ -952,9 +952,9 @@ class Heap {
   void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
 
   // Iterate pointers to from semispace of new space found in memory interval
-  // from start to end.
-  void IterateAndMarkPointersToFromSpace(bool record_slots, Address start,
-                                         Address end,
+  // from start to end within |object|.
+  void IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
+                                         Address end, bool record_slots,
                                          ObjectSlotCallback callback);
 
   // Returns whether the object resides in new space.
@@ -40,8 +40,7 @@ void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot,
     MarkBit obj_bit = Marking::MarkBitFrom(obj);
     if (Marking::IsBlack(obj_bit)) {
       // Object is not going to be rescanned we need to record the slot.
-      heap_->mark_compact_collector()->RecordSlot(HeapObject::RawField(obj, 0),
-                                                  slot, value);
+      heap_->mark_compact_collector()->RecordSlot(obj, slot, value);
     }
   }
 }
@@ -92,7 +91,7 @@ void IncrementalMarking::RecordWriteOfCodeEntrySlow(JSFunction* host,
   if (BaseRecordWrite(host, slot, value)) {
     DCHECK(slot != NULL);
     heap_->mark_compact_collector()->RecordCodeEntrySlot(
-        reinterpret_cast<Address>(slot), value);
+        host, reinterpret_cast<Address>(slot), value);
   }
 }
@@ -177,9 +176,8 @@ class IncrementalMarkingMarkingVisitor
       int already_scanned_offset = start_offset;
       bool scan_until_end = false;
       do {
-        VisitPointersWithAnchor(heap, HeapObject::RawField(object, 0),
-                                HeapObject::RawField(object, start_offset),
-                                HeapObject::RawField(object, end_offset));
+        VisitPointers(heap, object, HeapObject::RawField(object, start_offset),
+                      HeapObject::RawField(object, end_offset));
         start_offset = end_offset;
         end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
         scan_until_end =
@@ -214,31 +212,21 @@ class IncrementalMarkingMarkingVisitor
     VisitNativeContext(map, context);
   }
 
-  INLINE(static void VisitPointer(Heap* heap, Object** p)) {
-    Object* obj = *p;
-    if (obj->IsHeapObject()) {
-      heap->mark_compact_collector()->RecordSlot(p, p, obj);
-      MarkObject(heap, obj);
-    }
-  }
-
-  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
-    for (Object** p = start; p < end; p++) {
-      Object* obj = *p;
-      if (obj->IsHeapObject()) {
-        heap->mark_compact_collector()->RecordSlot(start, p, obj);
-        MarkObject(heap, obj);
-      }
+  INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) {
+    Object* target = *p;
+    if (target->IsHeapObject()) {
+      heap->mark_compact_collector()->RecordSlot(object, p, target);
+      MarkObject(heap, target);
     }
   }
 
-  INLINE(static void VisitPointersWithAnchor(Heap* heap, Object** anchor,
-                                             Object** start, Object** end)) {
+  INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
+                                   Object** start, Object** end)) {
     for (Object** p = start; p < end; p++) {
-      Object* obj = *p;
-      if (obj->IsHeapObject()) {
-        heap->mark_compact_collector()->RecordSlot(anchor, p, obj);
-        MarkObject(heap, obj);
+      Object* target = *p;
+      if (target->IsHeapObject()) {
+        heap->mark_compact_collector()->RecordSlot(object, p, target);
+        MarkObject(heap, target);
       }
     }
   }
@@ -55,15 +55,15 @@ bool MarkCompactCollector::IsMarked(Object* obj) {
 }
 
-void MarkCompactCollector::RecordSlot(Object** anchor_slot, Object** slot,
-                                      Object* object,
+void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
+                                      Object* target,
                                       SlotsBuffer::AdditionMode mode) {
-  Page* object_page = Page::FromAddress(reinterpret_cast<Address>(object));
-  if (object_page->IsEvacuationCandidate() &&
-      !ShouldSkipEvacuationSlotRecording(anchor_slot)) {
+  Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
+  if (target_page->IsEvacuationCandidate() &&
+      !ShouldSkipEvacuationSlotRecording(object)) {
     if (!SlotsBuffer::AddTo(&slots_buffer_allocator_,
-                            object_page->slots_buffer_address(), slot, mode)) {
-      EvictPopularEvacuationCandidate(object_page);
+                            target_page->slots_buffer_address(), slot, mode)) {
+      EvictPopularEvacuationCandidate(target_page);
     }
   }
 }
@@ -640,11 +640,6 @@ class MarkCompactCollector {
   void VerifyOmittedMapChecks();
 #endif
 
-  INLINE(static bool ShouldSkipEvacuationSlotRecording(Object** anchor)) {
-    return Page::FromAddress(reinterpret_cast<Address>(anchor))
-        ->ShouldSkipEvacuationSlotRecording();
-  }
-
   INLINE(static bool ShouldSkipEvacuationSlotRecording(Object* host)) {
     return Page::FromAddress(reinterpret_cast<Address>(host))
         ->ShouldSkipEvacuationSlotRecording();
@@ -656,11 +651,11 @@ class MarkCompactCollector {
   }
 
   void RecordRelocSlot(RelocInfo* rinfo, Object* target);
-  void RecordCodeEntrySlot(Address slot, Code* target);
+  void RecordCodeEntrySlot(HeapObject* object, Address slot, Code* target);
   void RecordCodeTargetPatch(Address pc, Code* target);
 
   INLINE(void RecordSlot(
-      Object** anchor_slot, Object** slot, Object* object,
+      HeapObject* object, Object** slot, Object* target,
       SlotsBuffer::AdditionMode mode = SlotsBuffer::FAIL_ON_OVERFLOW));
 
   void MigrateObject(HeapObject* dst, HeapObject* src, int size,
@@ -216,7 +216,7 @@ Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
       if (record_slots) {
         Object** next_slot =
             HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
-        collector->RecordSlot(next_slot, next_slot, retained);
+        collector->RecordSlot(tail, next_slot, retained);
       }
     }
 
     // Retained object is new tail.
@@ -327,8 +327,7 @@ struct WeakListVisitor<Context> {
       // Record the updated slot if necessary.
       Object** head_slot =
           HeapObject::RawField(context, FixedArray::SizeFor(index));
-      heap->mark_compact_collector()->RecordSlot(head_slot, head_slot,
-                                                 list_head);
+      heap->mark_compact_collector()->RecordSlot(context, head_slot, list_head);
     }
   }
@@ -215,7 +215,7 @@ class BodyVisitorBase : public AllStatic {
  private:
   INLINE(static void IterateRawPointers(Heap* heap, HeapObject* object,
                                         int start_offset, int end_offset)) {
-    StaticVisitor::VisitPointers(heap,
+    StaticVisitor::VisitPointers(heap, object,
                                  HeapObject::RawField(object, start_offset),
                                  HeapObject::RawField(object, end_offset));
   }
@@ -297,22 +297,23 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
     return table_.GetVisitor(map)(map, obj);
   }
 
-  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
+  INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
+                                   Object** start, Object** end)) {
     for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
   }
 
  private:
   INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
     Heap* heap = map->GetHeap();
-    VisitPointers(heap,
+    VisitPointers(heap, object,
                   HeapObject::RawField(object, JSFunction::kPropertiesOffset),
                   HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
 
     // Don't visit code entry. We are using this visitor only during scavenges.
     VisitPointers(
-        heap, HeapObject::RawField(object,
-                                   JSFunction::kCodeEntryOffset + kPointerSize),
+        heap, object, HeapObject::RawField(
+                          object, JSFunction::kCodeEntryOffset + kPointerSize),
         HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset));
     return JSFunction::kSize;
   }
@@ -410,7 +411,8 @@ class StaticMarkingVisitor : public StaticVisitorBase {
   INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
   INLINE(static void VisitWeakCell(Map* map, HeapObject* object));
-  INLINE(static void VisitCodeEntry(Heap* heap, Address entry_address));
+  INLINE(static void VisitCodeEntry(Heap* heap, HeapObject* object,
+                                    Address entry_address));
   INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
   INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
   INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));