Commit 5ba9ea18 authored by ishell, committed by Commit bot

Avoid manual traversal of object bodies in the GC.

This CL introduces the following visitors:
1) RecordMigratedSlotVisitor which simplifies MarkCompactCollector::MigrateObject().
2) IteratePointersToFromSpaceVisitor which simplifies Heap::IteratePointersToFromSpace().
3) FindPointersToNewSpaceVisitor which simplifies StoreBuffer::IteratePointersToNewSpace().

These changes make the object body descriptors the single place that knows how to traverse an object's body.
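
For orientation, here is a minimal, self-contained sketch of the pattern this CL relies on. The types and slot layout below are hypothetical stand-ins, not actual V8 code: a GC phase supplies an ObjectVisitor, and the object (via its body descriptor) decides which slot ranges that visitor sees, so the phase itself needs no per-type switch.

// Simplified sketch with assumed types; not the real V8 classes.
#include <cstdio>
#include <vector>

class Object;  // opaque tagged value

// Visitor interface: each GC phase overrides VisitPointers().
class ObjectVisitor {
 public:
  virtual ~ObjectVisitor() = default;
  virtual void VisitPointers(Object** start, Object** end) = 0;
};

// Toy heap object whose "body descriptor" knows that only the first
// two slots are tagged pointers; the remaining slots are raw data.
class HeapObject {
 public:
  explicit HeapObject(int slot_count) : slots_(slot_count, nullptr) {}

  // Analogous to HeapObject::IterateBody(): the object decides which
  // regions are tagged, the visitor decides what to do with them.
  void IterateBody(ObjectVisitor* v) {
    Object** base = slots_.data();
    v->VisitPointers(base, base + kTaggedSlots);  // raw tail is skipped
  }

 private:
  static constexpr int kTaggedSlots = 2;
  std::vector<Object*> slots_;
};

// Phase-specific visitor, in the spirit of RecordMigratedSlotVisitor:
// it simply records every tagged slot it is handed.
class RecordSlotsVisitor final : public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) override {
    for (Object** p = start; p < end; ++p)
      std::printf("recording slot at %p\n", static_cast<void*>(p));
  }
};

int main() {
  HeapObject obj(4);
  RecordSlotsVisitor visitor;
  obj.IterateBody(&visitor);  // no per-type switch in the GC phase itself
  return 0;
}

In the real patch the same idea is expressed through HeapObject::IterateBody() plus the visitors added below (RecordMigratedSlotVisitor, IteratePointersToFromSpaceVisitor, FindPointersToNewSpaceVisitor).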

Review URL: https://codereview.chromium.org/1441453002

Cr-Commit-Position: refs/heads/master@{#31992}
parent 06336a7d
@@ -4479,10 +4479,34 @@ void Heap::IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
}
class IteratePointersToFromSpaceVisitor final : public ObjectVisitor {
public:
IteratePointersToFromSpaceVisitor(Heap* heap, HeapObject* target,
bool record_slots,
ObjectSlotCallback callback)
: heap_(heap),
target_(target),
record_slots_(record_slots),
callback_(callback) {}
V8_INLINE void VisitPointers(Object** start, Object** end) override {
heap_->IterateAndMarkPointersToFromSpace(
target_, reinterpret_cast<Address>(start),
reinterpret_cast<Address>(end), record_slots_, callback_);
}
V8_INLINE void VisitCodeEntry(Address code_entry_slot) override {}
private:
Heap* heap_;
HeapObject* target_;
bool record_slots_;
ObjectSlotCallback callback_;
};
void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
ObjectSlotCallback callback) {
Address obj_address = target->address();
// We are not collecting slots on new space objects during mutation
// thus we have to scan for pointers to evacuation candidates when we
// promote objects. But we should not record any slots in non-black
@@ -4495,53 +4519,9 @@ void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
record_slots = Marking::IsBlack(mark_bit);
}
// Do not scavenge JSArrayBuffer's contents
switch (target->ContentType()) {
case HeapObjectContents::kTaggedValues: {
IterateAndMarkPointersToFromSpace(target, obj_address, obj_address + size,
record_slots, callback);
break;
}
case HeapObjectContents::kMixedValues: {
if (target->IsFixedTypedArrayBase()) {
IterateAndMarkPointersToFromSpace(
target, obj_address + FixedTypedArrayBase::kBasePointerOffset,
obj_address + FixedTypedArrayBase::kHeaderSize, record_slots,
callback);
} else if (target->IsBytecodeArray()) {
IterateAndMarkPointersToFromSpace(
target, obj_address + BytecodeArray::kConstantPoolOffset,
obj_address + BytecodeArray::kHeaderSize, record_slots, callback);
} else if (target->IsJSArrayBuffer()) {
IterateAndMarkPointersToFromSpace(
target, obj_address,
obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
record_slots, callback);
IterateAndMarkPointersToFromSpace(
target, obj_address + JSArrayBuffer::kSize, obj_address + size,
record_slots, callback);
#if V8_DOUBLE_FIELDS_UNBOXING
} else if (FLAG_unbox_double_fields) {
LayoutDescriptorHelper helper(target->map());
DCHECK(!helper.all_fields_tagged());
for (int offset = 0; offset < size;) {
int end_of_region_offset;
if (helper.IsTagged(offset, size, &end_of_region_offset)) {
IterateAndMarkPointersToFromSpace(
target, obj_address + offset,
obj_address + end_of_region_offset, record_slots, callback);
}
offset = end_of_region_offset;
}
#endif
}
break;
}
case HeapObjectContents::kRawValues: {
break;
}
}
IteratePointersToFromSpaceVisitor visitor(this, target, record_slots,
callback);
target->IterateBody(target->map()->instance_type(), size, &visitor);
}
@@ -2361,6 +2361,7 @@ class Heap {
friend class HeapIterator;
friend class IdleScavengeObserver;
friend class IncrementalMarking;
friend class IteratePointersToFromSpaceVisitor;
friend class MarkCompactCollector;
friend class MarkCompactMarkingVisitor;
friend class NewSpace;
@@ -2626,6 +2626,40 @@ void MarkCompactCollector::RecordRelocSlot(RelocInfo* rinfo, Object* target) {
}
class RecordMigratedSlotVisitor final : public ObjectVisitor {
public:
RecordMigratedSlotVisitor(MarkCompactCollector* collector,
SlotsBuffer** evacuation_slots_buffer)
: collector_(collector),
evacuation_slots_buffer_(evacuation_slots_buffer) {}
V8_INLINE void VisitPointer(Object** p) override {
collector_->RecordMigratedSlot(*p, reinterpret_cast<Address>(p),
evacuation_slots_buffer_);
}
V8_INLINE void VisitPointers(Object** start, Object** end) override {
while (start < end) {
collector_->RecordMigratedSlot(*start, reinterpret_cast<Address>(start),
evacuation_slots_buffer_);
++start;
}
}
V8_INLINE void VisitCodeEntry(Address code_entry_slot) override {
if (collector_->compacting_) {
Address code_entry = Memory::Address_at(code_entry_slot);
collector_->RecordMigratedCodeEntrySlot(code_entry, code_entry_slot,
evacuation_slots_buffer_);
}
}
private:
MarkCompactCollector* collector_;
SlotsBuffer** evacuation_slots_buffer_;
};
// We scavenge new space simultaneously with sweeping. This is done in two
// passes.
//
@@ -2651,26 +2685,10 @@ void MarkCompactCollector::MigrateObject(
DCHECK_OBJECT_SIZE(size);
DCHECK(evacuation_slots_buffer != nullptr);
DCHECK(IsAligned(size, kPointerSize));
switch (src->ContentType()) {
case HeapObjectContents::kTaggedValues:
MigrateObjectTagged(dst, src, size, evacuation_slots_buffer);
break;
case HeapObjectContents::kMixedValues:
MigrateObjectMixed(dst, src, size, evacuation_slots_buffer);
break;
case HeapObjectContents::kRawValues:
MigrateObjectRaw(dst, src, size);
break;
}
if (compacting_ && dst->IsJSFunction()) {
Address code_entry_slot = dst->address() + JSFunction::kCodeEntryOffset;
Address code_entry = Memory::Address_at(code_entry_slot);
RecordMigratedCodeEntrySlot(code_entry, code_entry_slot,
evacuation_slots_buffer);
}
heap()->MoveBlock(dst->address(), src->address(), size);
RecordMigratedSlotVisitor visitor(this, evacuation_slots_buffer);
dst->IterateBody(&visitor);
} else if (dest == CODE_SPACE) {
DCHECK_CODEOBJECT_SIZE(size, heap()->code_space());
DCHECK(evacuation_slots_buffer != nullptr);
@@ -2689,90 +2707,6 @@ void MarkCompactCollector::MigrateObject(
}
void MarkCompactCollector::MigrateObjectTagged(
HeapObject* dst, HeapObject* src, int size,
SlotsBuffer** evacuation_slots_buffer) {
Address src_slot = src->address();
Address dst_slot = dst->address();
for (int remaining = size / kPointerSize; remaining > 0; remaining--) {
Object* value = Memory::Object_at(src_slot);
Memory::Object_at(dst_slot) = value;
RecordMigratedSlot(value, dst_slot, evacuation_slots_buffer);
src_slot += kPointerSize;
dst_slot += kPointerSize;
}
}
void MarkCompactCollector::MigrateObjectMixed(
HeapObject* dst, HeapObject* src, int size,
SlotsBuffer** evacuation_slots_buffer) {
if (src->IsFixedTypedArrayBase()) {
heap()->MoveBlock(dst->address(), src->address(), size);
Address base_pointer_slot =
dst->address() + FixedTypedArrayBase::kBasePointerOffset;
RecordMigratedSlot(Memory::Object_at(base_pointer_slot), base_pointer_slot,
evacuation_slots_buffer);
} else if (src->IsBytecodeArray()) {
heap()->MoveBlock(dst->address(), src->address(), size);
Address constant_pool_slot =
dst->address() + BytecodeArray::kConstantPoolOffset;
RecordMigratedSlot(Memory::Object_at(constant_pool_slot),
constant_pool_slot, evacuation_slots_buffer);
} else if (src->IsJSArrayBuffer()) {
heap()->MoveBlock(dst->address(), src->address(), size);
// Visit inherited JSObject properties and byte length of ArrayBuffer
Address regular_slot = dst->address() + JSArrayBuffer::kPropertiesOffset;
Address regular_slots_end =
dst->address() + JSArrayBuffer::kByteLengthOffset + kPointerSize;
while (regular_slot < regular_slots_end) {
RecordMigratedSlot(Memory::Object_at(regular_slot), regular_slot,
evacuation_slots_buffer);
regular_slot += kPointerSize;
}
// Skip backing store and visit just internal fields
Address internal_field_slot = dst->address() + JSArrayBuffer::kSize;
Address internal_fields_end =
dst->address() + JSArrayBuffer::kSizeWithInternalFields;
while (internal_field_slot < internal_fields_end) {
RecordMigratedSlot(Memory::Object_at(internal_field_slot),
internal_field_slot, evacuation_slots_buffer);
internal_field_slot += kPointerSize;
}
} else if (FLAG_unbox_double_fields) {
Address dst_addr = dst->address();
Address src_addr = src->address();
Address src_slot = src_addr;
Address dst_slot = dst_addr;
LayoutDescriptorHelper helper(src->map());
DCHECK(!helper.all_fields_tagged());
for (int remaining = size / kPointerSize; remaining > 0; remaining--) {
Object* value = Memory::Object_at(src_slot);
Memory::Object_at(dst_slot) = value;
if (helper.IsTagged(static_cast<int>(src_slot - src_addr))) {
RecordMigratedSlot(value, dst_slot, evacuation_slots_buffer);
}
src_slot += kPointerSize;
dst_slot += kPointerSize;
}
} else {
UNREACHABLE();
}
}
void MarkCompactCollector::MigrateObjectRaw(HeapObject* dst, HeapObject* src,
int size) {
heap()->MoveBlock(dst->address(), src->address(), size);
}
static inline void UpdateSlot(Isolate* isolate, ObjectVisitor* v,
SlotsBuffer::SlotType slot_type, Address addr) {
switch (slot_type) {
@@ -3115,45 +3049,8 @@ bool MarkCompactCollector::IsSlotInLiveObject(Address slot) {
}
DCHECK(object != NULL);
switch (object->ContentType()) {
case HeapObjectContents::kTaggedValues:
return true;
case HeapObjectContents::kRawValues: {
InstanceType type = object->map()->instance_type();
// Slots in maps and code can't be invalid because they are never
// shrunk.
if (type == MAP_TYPE || type == CODE_TYPE) return true;
// Consider slots in objects that contain ONLY raw data as invalid.
return false;
}
case HeapObjectContents::kMixedValues: {
if (object->IsFixedTypedArrayBase()) {
return static_cast<int>(slot - object->address()) ==
FixedTypedArrayBase::kBasePointerOffset;
} else if (object->IsBytecodeArray()) {
return static_cast<int>(slot - object->address()) ==
BytecodeArray::kConstantPoolOffset;
} else if (object->IsJSArrayBuffer()) {
int off = static_cast<int>(slot - object->address());
return (off >= JSArrayBuffer::kPropertiesOffset &&
off <= JSArrayBuffer::kByteLengthOffset) ||
(off >= JSArrayBuffer::kSize &&
off < JSArrayBuffer::kSizeWithInternalFields);
} else if (FLAG_unbox_double_fields) {
// Filter out slots that happen to point to unboxed double fields.
LayoutDescriptorHelper helper(object->map());
DCHECK(!helper.all_fields_tagged());
return helper.IsTagged(static_cast<int>(slot - object->address()));
}
break;
}
}
UNREACHABLE();
return true;
int offset = static_cast<int>(slot - object->address());
return object->IsValidSlot(offset);
}
@@ -416,12 +416,6 @@ class MarkCompactCollector {
AllocationSpace to_old_space,
SlotsBuffer** evacuation_slots_buffer);
void MigrateObjectTagged(HeapObject* dst, HeapObject* src, int size,
SlotsBuffer** evacuation_slots_buffer);
void MigrateObjectMixed(HeapObject* dst, HeapObject* src, int size,
SlotsBuffer** evacuation_slots_buffer);
void MigrateObjectRaw(HeapObject* dst, HeapObject* src, int size);
bool TryPromoteObject(HeapObject* object, int object_size);
void InvalidateCode(Code* code);
@@ -570,11 +564,12 @@ class MarkCompactCollector {
// After: Live objects are marked and non-live objects are unmarked.
friend class CodeMarkingVisitor;
friend class IncrementalMarkingMarkingVisitor;
friend class MarkCompactMarkingVisitor;
friend class MarkingVisitor;
friend class RecordMigratedSlotVisitor;
friend class RootMarkingVisitor;
friend class SharedFunctionInfoMarkingVisitor;
friend class IncrementalMarkingMarkingVisitor;
// Mark code objects that are active on the stack to prevent them
// from being flushed.
@@ -3212,11 +3212,6 @@ void LargeObjectSpace::Verify() {
CHECK(map->IsMap());
CHECK(heap()->map_space()->Contains(map));
// Double unboxing in LO space is not allowed. This would break the
// lookup mechanism for store and slot buffer entries which use the
// page header tag.
CHECK(object->ContentType() != HeapObjectContents::kMixedValues);
// We have only code, sequential strings, external strings
// (sequential strings that have been morphed into external
// strings), fixed arrays, byte arrays, and constant pool arrays in the
@@ -412,6 +412,26 @@ void StoreBuffer::VerifyValidStoreBufferEntries() {
}
class FindPointersToNewSpaceVisitor final : public ObjectVisitor {
public:
FindPointersToNewSpaceVisitor(StoreBuffer* store_buffer,
ObjectSlotCallback callback)
: store_buffer_(store_buffer), callback_(callback) {}
V8_INLINE void VisitPointers(Object** start, Object** end) override {
store_buffer_->FindPointersToNewSpaceInRegion(
reinterpret_cast<Address>(start), reinterpret_cast<Address>(end),
callback_);
}
V8_INLINE void VisitCodeEntry(Address code_entry_slot) override {}
private:
StoreBuffer* store_buffer_;
ObjectSlotCallback callback_;
};
void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) {
// We do not sort or remove duplicated entries from the store buffer because
// we expect that callback will rebuild the store buffer thus removing
@@ -438,6 +458,7 @@ void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) {
}
PointerChunkIterator it(heap_);
MemoryChunk* chunk;
FindPointersToNewSpaceVisitor visitor(this, slot_callback);
while ((chunk = it.next()) != NULL) {
if (chunk->scan_on_scavenge()) {
chunk->set_scan_on_scavenge(false);
@@ -475,63 +496,7 @@ void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) {
for (HeapObject* heap_object = iterator.Next(); heap_object != NULL;
heap_object = iterator.Next()) {
// We iterate over objects that contain new space pointers only.
Address obj_address = heap_object->address();
const int start_offset = HeapObject::kHeaderSize;
const int end_offset = heap_object->Size();
switch (heap_object->ContentType()) {
case HeapObjectContents::kTaggedValues: {
Address start_address = obj_address + start_offset;
Address end_address = obj_address + end_offset;
// Object has only tagged fields.
FindPointersToNewSpaceInRegion(start_address, end_address,
slot_callback);
break;
}
case HeapObjectContents::kMixedValues: {
if (heap_object->IsFixedTypedArrayBase()) {
FindPointersToNewSpaceInRegion(
obj_address + FixedTypedArrayBase::kBasePointerOffset,
obj_address + FixedTypedArrayBase::kHeaderSize,
slot_callback);
} else if (heap_object->IsBytecodeArray()) {
FindPointersToNewSpaceInRegion(
obj_address + BytecodeArray::kConstantPoolOffset,
obj_address + BytecodeArray::kHeaderSize,
slot_callback);
} else if (heap_object->IsJSArrayBuffer()) {
FindPointersToNewSpaceInRegion(
obj_address + JSArrayBuffer::kPropertiesOffset,
obj_address + JSArrayBuffer::kByteLengthOffset +
kPointerSize,
slot_callback);
FindPointersToNewSpaceInRegion(
obj_address + JSArrayBuffer::kSize,
obj_address + JSArrayBuffer::kSizeWithInternalFields,
slot_callback);
} else if (FLAG_unbox_double_fields) {
LayoutDescriptorHelper helper(heap_object->map());
DCHECK(!helper.all_fields_tagged());
for (int offset = start_offset; offset < end_offset;) {
int end_of_region_offset;
if (helper.IsTagged(offset, end_offset,
&end_of_region_offset)) {
FindPointersToNewSpaceInRegion(
obj_address + offset,
obj_address + end_of_region_offset, slot_callback);
}
offset = end_of_region_offset;
}
} else {
UNREACHABLE();
}
break;
}
case HeapObjectContents::kRawValues:
break;
}
heap_object->IterateBody(&visitor);
}
}
}
@@ -162,8 +162,9 @@ class StoreBuffer {
void VerifyPointers(LargeObjectSpace* space);
#endif
friend class StoreBufferRebuildScope;
friend class DontMoveStoreBufferEntriesScope;
friend class FindPointersToNewSpaceVisitor;
friend class StoreBufferRebuildScope;
};
@@ -1494,46 +1494,6 @@ int HeapObject::Size() {
}
HeapObjectContents HeapObject::ContentType() {
InstanceType type = map()->instance_type();
if (type <= LAST_NAME_TYPE) {
if (type == SYMBOL_TYPE) {
return HeapObjectContents::kTaggedValues;
}
DCHECK(type < FIRST_NONSTRING_TYPE);
// There are four string representations: sequential strings, external
// strings, cons strings, and sliced strings.
// Only the former two contain raw values and no heap pointers (besides the
// map-word).
if (((type & kIsIndirectStringMask) != kIsIndirectStringTag))
return HeapObjectContents::kRawValues;
else
return HeapObjectContents::kTaggedValues;
#if 0
// TODO(jochen): Enable eventually.
} else if (type == JS_FUNCTION_TYPE) {
return HeapObjectContents::kMixedValues;
#endif
} else if (type == BYTECODE_ARRAY_TYPE) {
return HeapObjectContents::kMixedValues;
} else if (type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
return HeapObjectContents::kMixedValues;
} else if (type == JS_ARRAY_BUFFER_TYPE) {
return HeapObjectContents::kMixedValues;
} else if (type <= LAST_DATA_TYPE) {
// TODO(jochen): Why do we claim that Code and Map contain only raw values?
return HeapObjectContents::kRawValues;
} else {
if (FLAG_unbox_double_fields) {
LayoutDescriptorHelper helper(map());
if (!helper.all_fields_tagged()) return HeapObjectContents::kMixedValues;
}
return HeapObjectContents::kTaggedValues;
}
}
double HeapNumber::value() const {
return READ_DOUBLE_FIELD(this, kValueOffset);
}
@@ -1504,13 +1504,6 @@ class MapWord BASE_EMBEDDED {
};
// The content of an heap object (except for the map pointer). kTaggedValues
// objects can contain both heap pointers and Smis, kMixedValues can contain
// heap pointers, Smis, and raw values (e.g. doubles or strings), and kRawValues
// objects can contain raw values and Smis.
enum class HeapObjectContents { kTaggedValues, kMixedValues, kRawValues };
// HeapObject is the superclass for all classes describing heap allocated
// objects.
class HeapObject: public Object {
@@ -1587,9 +1580,6 @@ class HeapObject: public Object {
// Returns the heap object's size in bytes
inline int Size();
// Indicates what type of values this heap object may contain.
inline HeapObjectContents ContentType();
// Given a heap object's map pointer, returns the heap size in bytes
// Useful when the map pointer field is used for other purposes.
// GC internal.