Commit ef6cb2dd authored by vegorov@chromium.org

Use virtually dispatched specialized scavengers instead of a single generic ScavengeObjectSlow implementation.

Review URL: http://codereview.chromium.org/2895008

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5041 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 4329a58b
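The heart of this change: each Map caches a Scavenger callback chosen once from its instance type and size, and the scavenge path dispatches through that cached pointer instead of re-deriving type and size per object. A minimal standalone sketch of the dispatch pattern, assuming a simplified object model (TypeInfo, EvacuateFixedSize, and the printf bodies are illustrative stand-ins, not V8's API):

```cpp
#include <cstdio>

struct Object;  // forward declaration so the callback type can mention it
typedef void (*Scavenger)(Object* obj);  // one specialized routine per type

struct TypeInfo {
  const char* name;
  Scavenger scavenge;  // cached once per type, like Map::scavenger()
};

struct Object {
  TypeInfo* type;
};

// Size-specialized evacuation: the word count is a compile-time constant,
// so a real copy loop here could be fully unrolled by the compiler.
template <int kSizeInWords>
static void EvacuateFixedSize(Object* obj) {
  std::printf("evacuating %s (%d words)\n", obj->type->name, kSizeInWords);
}

int main() {
  TypeInfo small_type = {"small", &EvacuateFixedSize<2>};
  TypeInfo large_type = {"large", &EvacuateFixedSize<16>};
  Object a = {&small_type};
  Object b = {&large_type};
  a.type->scavenge(&a);  // virtual-style dispatch through the type record
  b.type->scavenge(&b);
  return 0;
}
```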
@@ -812,6 +812,9 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
     initial_map->set_instance_size(
         initial_map->instance_size() + 5 * kPointerSize);
     initial_map->set_instance_descriptors(*descriptors);
+    initial_map->set_scavenger(
+        Heap::GetScavenger(initial_map->instance_type(),
+                           initial_map->instance_size()));
   }
   { // -- J S O N
......
@@ -277,6 +277,8 @@ Handle<Map> Factory::CopyMap(Handle<Map> src,
   copy->set_inobject_properties(inobject_properties);
   copy->set_unused_property_fields(inobject_properties);
   copy->set_instance_size(copy->instance_size() + instance_size_delta);
+  copy->set_scavenger(Heap::GetScavenger(copy->instance_type(),
+                                         copy->instance_size()));
   return copy;
 }
......
@@ -799,34 +799,34 @@ class ScavengeVisitor: public ObjectVisitor {
 };

-// A queue of pointers and maps of to-be-promoted objects during a
-// scavenge collection.
+// A queue of objects promoted during scavenge. Each object is accompanied
+// by its size to avoid dereferencing a map pointer for scanning.
 class PromotionQueue {
  public:
   void Initialize(Address start_address) {
-    front_ = rear_ = reinterpret_cast<HeapObject**>(start_address);
+    front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
   }

   bool is_empty() { return front_ <= rear_; }

-  void insert(HeapObject* object, Map* map) {
-    *(--rear_) = object;
-    *(--rear_) = map;
+  void insert(HeapObject* target, int size) {
+    *(--rear_) = reinterpret_cast<intptr_t>(target);
+    *(--rear_) = size;
     // Assert no overflow into live objects.
     ASSERT(reinterpret_cast<Address>(rear_) >= Heap::new_space()->top());
   }

-  void remove(HeapObject** object, Map** map) {
-    *object = *(--front_);
-    *map = Map::cast(*(--front_));
+  void remove(HeapObject** target, int* size) {
+    *target = reinterpret_cast<HeapObject*>(*(--front_));
+    *size = *(--front_);
     // Assert no underflow.
     ASSERT(front_ >= rear_);
   }

 private:
   // The front of the queue is higher in memory than the rear.
-  HeapObject** front_;
-  HeapObject** rear_;
+  intptr_t* front_;
+  intptr_t* rear_;
 };
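The rewritten PromotionQueue grows downward from the top of to-space and stores two words per entry: the promoted object's address and its size, so the drain loop never has to touch a map to learn the size. A standalone sketch of the same discipline, assuming an ordinary buffer in place of new-space top (DownwardQueue is a hypothetical name, not V8 code):

```cpp
#include <cassert>
#include <cstdint>
#include <cstdio>

class DownwardQueue {
 public:
  explicit DownwardQueue(intptr_t* high_end) { front_ = rear_ = high_end; }

  bool is_empty() const { return front_ <= rear_; }

  void insert(void* target, int size) {
    *(--rear_) = reinterpret_cast<intptr_t>(target);
    *(--rear_) = size;  // two words per entry, pushed toward lower addresses
  }

  void remove(void** target, int* size) {
    *target = reinterpret_cast<void*>(*(--front_));
    *size = static_cast<int>(*(--front_));
    assert(front_ >= rear_);  // no underflow
  }

 private:
  intptr_t* front_;  // higher in memory than rear_
  intptr_t* rear_;
};

int main() {
  intptr_t storage[16];
  DownwardQueue q(storage + 16);  // start at the high end of the buffer
  int x = 42;
  q.insert(&x, sizeof(x));
  void* obj = 0;
  int size = 0;
  q.remove(&obj, &size);
  std::printf("dequeued %p, size %d\n", obj, size);
  return 0;
}
```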
@@ -1041,31 +1041,26 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
     // queue is empty.
     while (new_space_front < new_space_.top()) {
       HeapObject* object = HeapObject::FromAddress(new_space_front);
-      object->Iterate(scavenge_visitor);
-      new_space_front += object->Size();
+      Map* map = object->map();
+      int size = object->SizeFromMap(map);
+      object->IterateBody(map->instance_type(), size, scavenge_visitor);
+      new_space_front += size;
     }

     // Promote and process all the to-be-promoted objects.
     while (!promotion_queue.is_empty()) {
-      HeapObject* source;
-      Map* map;
-      promotion_queue.remove(&source, &map);
-
-      // Copy the from-space object to its new location (given by the
-      // forwarding address) and fix its map.
-      HeapObject* target = source->map_word().ToForwardingAddress();
-      int size = source->SizeFromMap(map);
-      CopyBlock(target->address(), source->address(), size);
-      target->set_map(map);
-
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-      // Update NewSpace stats if necessary.
-      RecordCopiedObject(target);
-#endif
-      // Visit the newly copied object for pointers to new space.
+      HeapObject* target;
+      int size;
+      promotion_queue.remove(&target, &size);
+
+      // The promoted object might already have been partially visited
+      // during dirty regions iteration, so we search specifically for
+      // pointers to the from semispace instead of for pointers to new
+      // space.
       ASSERT(!target->IsMap());
-      IterateAndMarkPointersToNewSpace(target->address(),
-                                       target->address() + size,
-                                       &ScavengePointer);
+      IterateAndMarkPointersToFromSpace(target->address(),
+                                        target->address() + size,
+                                        &ScavengePointer);
     }

     // Take another spin if there are now unswept objects in new space
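The replacement comment above is the subtle point: a promoted object may already have had some of its slots updated by dirty-region iteration, and those slots now point into to-space. Filtering on membership in the from semispace, rather than in all of new space, skips slots that are already correct. A toy model of that filter (the address ranges and names are made up for illustration):

```cpp
#include <cstdint>
#include <cstdio>

// Toy model: addresses in [from_lo, from_hi) are "from-space" (stale);
// anything else is either already evacuated or outside new space.
static bool InFromSpace(uintptr_t p, uintptr_t from_lo, uintptr_t from_hi) {
  return from_lo <= p && p < from_hi;
}

int main() {
  const uintptr_t from_lo = 0x1000, from_hi = 0x2000;
  // Three slots of a promoted object: one already updated to to-space,
  // one still pointing into from-space, one pointing at the old heap.
  uintptr_t slots[] = {0x3000, 0x1800, 0x9000};
  for (int i = 0; i < 3; i++) {
    if (InFromSpace(slots[i], from_lo, from_hi)) {
      std::printf("slot %d is stale and must be scavenged\n", i);
    } else {
      std::printf("slot %d needs no work\n", i);  // already updated or old
    }
  }
  return 0;
}
```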
@@ -1077,7 +1072,7 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,

 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-void Heap::RecordCopiedObject(HeapObject* obj) {
+static void RecordCopiedObject(HeapObject* obj) {
   bool should_record = false;
 #ifdef DEBUG
   should_record = FLAG_heap_stats;

@@ -1086,22 +1081,24 @@ void Heap::RecordCopiedObject(HeapObject* obj) {
   should_record = should_record || FLAG_log_gc;
 #endif
   if (should_record) {
-    if (new_space_.Contains(obj)) {
-      new_space_.RecordAllocation(obj);
+    if (Heap::new_space()->Contains(obj)) {
+      Heap::new_space()->RecordAllocation(obj);
     } else {
-      new_space_.RecordPromotion(obj);
+      Heap::new_space()->RecordPromotion(obj);
     }
   }
 }
 #endif  // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)


-HeapObject* Heap::MigrateObject(HeapObject* source,
-                                HeapObject* target,
-                                int size) {
+// Helper function used by CopyObject to copy a source object to an
+// allocated target object and update the forwarding pointer in the source
+// object. Returns the target object.
+inline static HeapObject* MigrateObject(HeapObject* source,
+                                        HeapObject* target,
+                                        int size) {
   // Copy the content of source to target.
-  CopyBlock(target->address(), source->address(), size);
+  Heap::CopyBlock(target->address(), source->address(), size);

   // Set the forwarding address.
   source->set_map_word(MapWord::FromForwardingAddress(target));
@@ -1115,117 +1112,281 @@ HeapObject* Heap::MigrateObject(HeapObject* source,
 }


-static inline bool IsShortcutCandidate(HeapObject* object, Map* map) {
-  STATIC_ASSERT(kNotStringTag != 0 && kSymbolTag != 0);
-  ASSERT(object->map() == map);
-  InstanceType type = map->instance_type();
-  if ((type & kShortcutTypeMask) != kShortcutTypeTag) return false;
-  ASSERT(object->IsString() && !object->IsSymbol());
-  return ConsString::cast(object)->unchecked_second() == Heap::empty_string();
-}
-
-
-void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
-  ASSERT(InFromSpace(object));
-  MapWord first_word = object->map_word();
-  ASSERT(!first_word.IsForwardingAddress());
-
-  // Optimization: Bypass flattened ConsString objects.
-  if (IsShortcutCandidate(object, first_word.ToMap())) {
-    object = HeapObject::cast(ConsString::cast(object)->unchecked_first());
-    *p = object;
-    // After patching *p we have to repeat the checks that object is in the
-    // active semispace of the young generation and not already copied.
-    if (!InNewSpace(object)) return;
-
-    first_word = object->map_word();
-    if (first_word.IsForwardingAddress()) {
-      *p = first_word.ToForwardingAddress();
-      return;
-    }
-  }
-
-  int object_size = object->SizeFromMap(first_word.ToMap());
-  // We rely on live objects in new space to be at least two pointers,
-  // so we can store the from-space address and map pointer of promoted
-  // objects in the to space.
-  ASSERT(object_size >= 2 * kPointerSize);
-
-  // If the object should be promoted, we try to copy it to old space.
-  if (ShouldBePromoted(object->address(), object_size)) {
-    Object* result;
-    if (object_size > MaxObjectSizeInPagedSpace()) {
-      result = lo_space_->AllocateRawFixedArray(object_size);
-      if (!result->IsFailure()) {
-        HeapObject* target = HeapObject::cast(result);
-        if (object->IsFixedArray()) {
-          // Save the from-space object pointer and its map pointer at the
-          // top of the to space to be swept and copied later.  Write the
-          // forwarding address over the map word of the from-space
-          // object.
-          promotion_queue.insert(object, first_word.ToMap());
-          object->set_map_word(MapWord::FromForwardingAddress(target));
-          // Give the space allocated for the result a proper map by
-          // treating it as a free list node (not linked into the free
-          // list).
-          FreeListNode* node = FreeListNode::FromAddress(target->address());
-          node->set_size(object_size);
-          *p = target;
-        } else {
-          // In large object space only fixed arrays might possibly contain
-          // intergenerational references.
-          // All other objects can be copied immediately and not revisited.
-          *p = MigrateObject(object, target, object_size);
-        }
-        tracer()->increment_promoted_objects_size(object_size);
-        return;
-      }
-    } else {
-      OldSpace* target_space = Heap::TargetSpace(object);
-      ASSERT(target_space == Heap::old_pointer_space_ ||
-             target_space == Heap::old_data_space_);
-      result = target_space->AllocateRaw(object_size);
-      if (!result->IsFailure()) {
-        HeapObject* target = HeapObject::cast(result);
-        if (target_space == Heap::old_pointer_space_) {
-          // Save the from-space object pointer and its map pointer at the
-          // top of the to space to be swept and copied later.  Write the
-          // forwarding address over the map word of the from-space
-          // object.
-          promotion_queue.insert(object, first_word.ToMap());
-          object->set_map_word(MapWord::FromForwardingAddress(target));
-          // Give the space allocated for the result a proper map by
-          // treating it as a free list node (not linked into the free
-          // list).
-          FreeListNode* node = FreeListNode::FromAddress(target->address());
-          node->set_size(object_size);
-          *p = target;
-        } else {
-          // Objects promoted to the data space can be copied immediately
-          // and not revisited---we will never sweep that space for
-          // pointers and the copied objects do not contain pointers to
-          // new space objects.
-          *p = MigrateObject(object, target, object_size);
-#ifdef DEBUG
-          VerifyNonPointerSpacePointersVisitor v;
-          (*p)->Iterate(&v);
-#endif
-        }
-        tracer()->increment_promoted_objects_size(object_size);
-        return;
-      }
-    }
-  }
-
-  // The object should remain in new space or the old space allocation failed.
-  Object* result = new_space_.AllocateRaw(object_size);
-  // Failed allocation at this point is utterly unexpected.
-  ASSERT(!result->IsFailure());
-  *p = MigrateObject(object, HeapObject::cast(result), object_size);
+enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
+enum SizeRestriction { SMALL, UNKNOWN_SIZE };
+
+
+template<ObjectContents object_contents, SizeRestriction size_restriction>
+static inline void EvacuateObject(Map* map,
+                                  HeapObject** slot,
+                                  HeapObject* object,
+                                  int object_size) {
+  ASSERT((size_restriction != SMALL) ||
+         (object_size <= Page::kMaxHeapObjectSize));
+  ASSERT(object->Size() == object_size);
+
+  if (Heap::ShouldBePromoted(object->address(), object_size)) {
+    Object* result;
+
+    if ((size_restriction != SMALL) &&
+        (object_size > Page::kMaxHeapObjectSize)) {
+      result = Heap::lo_space()->AllocateRawFixedArray(object_size);
+    } else {
+      if (object_contents == DATA_OBJECT) {
+        result = Heap::old_data_space()->AllocateRaw(object_size);
+      } else {
+        result = Heap::old_pointer_space()->AllocateRaw(object_size);
+      }
+    }
+
+    if (!result->IsFailure()) {
+      HeapObject* target = HeapObject::cast(result);
+      *slot = MigrateObject(object, target, object_size);
+
+      if (object_contents == POINTER_OBJECT) {
+        promotion_queue.insert(target, object_size);
+      }
+
+      Heap::tracer()->increment_promoted_objects_size(object_size);
+      return;
+    }
+  }
+
+  Object* result = Heap::new_space()->AllocateRaw(object_size);
+  ASSERT(!result->IsFailure());
+  *slot = MigrateObject(object, HeapObject::cast(result), object_size);
+  return;
+}
+
+
+template<int object_size_in_words, ObjectContents object_contents>
+static inline void EvacuateObjectOfFixedSize(Map* map,
+                                             HeapObject** slot,
+                                             HeapObject* object) {
+  const int object_size = object_size_in_words << kPointerSizeLog2;
+  EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+}
+
+
+template<ObjectContents object_contents>
+static inline void EvacuateObjectOfFixedSize(Map* map,
+                                             HeapObject** slot,
+                                             HeapObject* object) {
+  int object_size = map->instance_size();
+  EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+}
+
+
+static inline void EvacuateFixedArray(Map* map,
+                                      HeapObject** slot,
+                                      HeapObject* object) {
+  int object_size = FixedArray::cast(object)->FixedArraySize();
+  EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+}
+
+
+static inline void EvacuateByteArray(Map* map,
+                                     HeapObject** slot,
+                                     HeapObject* object) {
+  int object_size = ByteArray::cast(object)->ByteArraySize();
+  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+}
+
+
+static Scavenger GetScavengerForSize(int object_size,
+                                     ObjectContents object_contents) {
+  ASSERT(IsAligned(object_size, kPointerSize));
+  ASSERT(object_size < Page::kMaxHeapObjectSize);
+
+  switch (object_size >> kPointerSizeLog2) {
+#define CASE(n)                                             \
+    case n:                                                 \
+      if (object_contents == DATA_OBJECT) {                 \
+        return static_cast<Scavenger>(                      \
+            &EvacuateObjectOfFixedSize<n, DATA_OBJECT>);    \
+      } else {                                              \
+        return static_cast<Scavenger>(                      \
+            &EvacuateObjectOfFixedSize<n, POINTER_OBJECT>); \
+      }
+
+    CASE(1);
+    CASE(2);
+    CASE(3);
+    CASE(4);
+    CASE(5);
+    CASE(6);
+    CASE(7);
+    CASE(8);
+    CASE(9);
+    CASE(10);
+    CASE(11);
+    CASE(12);
+    CASE(13);
+    CASE(14);
+    CASE(15);
+    CASE(16);
+
+    default:
+      if (object_contents == DATA_OBJECT) {
+        return static_cast<Scavenger>(&EvacuateObjectOfFixedSize<DATA_OBJECT>);
+      } else {
+        return static_cast<Scavenger>(
+            &EvacuateObjectOfFixedSize<POINTER_OBJECT>);
+      }
+#undef CASE
+  }
+}
+
+
+static inline void EvacuateSeqAsciiString(Map* map,
+                                          HeapObject** slot,
+                                          HeapObject* object) {
+  int object_size = SeqAsciiString::cast(object)->
+      SeqAsciiStringSize(map->instance_type());
+  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+}
+
+
+static inline void EvacuateSeqTwoByteString(Map* map,
+                                            HeapObject** slot,
+                                            HeapObject* object) {
+  int object_size = SeqTwoByteString::cast(object)->
+      SeqTwoByteStringSize(map->instance_type());
+  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+}
+
+
+static inline bool IsShortcutCandidate(int type) {
+  return ((type & kShortcutTypeMask) == kShortcutTypeTag);
+}
+
+
+static inline void EvacuateShortcutCandidate(Map* map,
+                                             HeapObject** slot,
+                                             HeapObject* object) {
+  ASSERT(IsShortcutCandidate(map->instance_type()));
+
+  if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) {
+    HeapObject* first =
+        HeapObject::cast(ConsString::cast(object)->unchecked_first());
+
+    *slot = first;
+
+    if (!Heap::InNewSpace(first)) {
+      object->set_map_word(MapWord::FromForwardingAddress(first));
+      return;
+    }
+
+    MapWord first_word = first->map_word();
+    if (first_word.IsForwardingAddress()) {
+      HeapObject* target = first_word.ToForwardingAddress();
+
+      *slot = target;
+      object->set_map_word(MapWord::FromForwardingAddress(target));
+      return;
+    }
+
+    first->map()->Scavenge(slot, first);
+    object->set_map_word(MapWord::FromForwardingAddress(*slot));
+    return;
+  }
+
+  int object_size = ConsString::kSize;
+  EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
+}
+
+
+Scavenger Heap::GetScavenger(int instance_type, int instance_size) {
+  if (instance_type < FIRST_NONSTRING_TYPE) {
+    switch (instance_type & kStringRepresentationMask) {
+      case kSeqStringTag:
+        if ((instance_type & kStringEncodingMask) == kAsciiStringTag) {
+          return &EvacuateSeqAsciiString;
+        } else {
+          return &EvacuateSeqTwoByteString;
+        }
+
+      case kConsStringTag:
+        if (IsShortcutCandidate(instance_type)) {
+          return &EvacuateShortcutCandidate;
+        } else {
+          ASSERT(instance_size == ConsString::kSize);
+          return GetScavengerForSize(ConsString::kSize, POINTER_OBJECT);
+        }
+
+      case kExternalStringTag:
+        ASSERT(instance_size == ExternalString::kSize);
+        return GetScavengerForSize(ExternalString::kSize, DATA_OBJECT);
+    }
+    UNREACHABLE();
+  }
+
+  switch (instance_type) {
+    case BYTE_ARRAY_TYPE:
+      return reinterpret_cast<Scavenger>(&EvacuateByteArray);
+
+    case FIXED_ARRAY_TYPE:
+      return reinterpret_cast<Scavenger>(&EvacuateFixedArray);
+
+    case JS_OBJECT_TYPE:
+    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
+    case JS_VALUE_TYPE:
+    case JS_ARRAY_TYPE:
+    case JS_REGEXP_TYPE:
+    case JS_FUNCTION_TYPE:
+    case JS_GLOBAL_PROXY_TYPE:
+    case JS_GLOBAL_OBJECT_TYPE:
+    case JS_BUILTINS_OBJECT_TYPE:
+      return GetScavengerForSize(instance_size, POINTER_OBJECT);
+
+    case ODDBALL_TYPE:
+      return NULL;
+
+    case PROXY_TYPE:
+      return GetScavengerForSize(Proxy::kSize, DATA_OBJECT);
+
+    case MAP_TYPE:
+      return NULL;
+
+    case CODE_TYPE:
+      return NULL;
+
+    case JS_GLOBAL_PROPERTY_CELL_TYPE:
+      return NULL;
+
+    case HEAP_NUMBER_TYPE:
+    case FILLER_TYPE:
+    case PIXEL_ARRAY_TYPE:
+    case EXTERNAL_BYTE_ARRAY_TYPE:
+    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
+    case EXTERNAL_SHORT_ARRAY_TYPE:
+    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
+    case EXTERNAL_INT_ARRAY_TYPE:
+    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
+    case EXTERNAL_FLOAT_ARRAY_TYPE:
+      return GetScavengerForSize(instance_size, DATA_OBJECT);
+
+    case SHARED_FUNCTION_INFO_TYPE:
+      return GetScavengerForSize(SharedFunctionInfo::kAlignedSize,
+                                 POINTER_OBJECT);
+
+#define MAKE_STRUCT_CASE(NAME, Name, name) \
+        case NAME##_TYPE:
+      STRUCT_LIST(MAKE_STRUCT_CASE)
+#undef MAKE_STRUCT_CASE
+      return GetScavengerForSize(instance_size, POINTER_OBJECT);
+
+    default:
+      UNREACHABLE();
+      return NULL;
+  }
+}
+
+
+void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
+  ASSERT(InFromSpace(object));
+  MapWord first_word = object->map_word();
+  ASSERT(!first_word.IsForwardingAddress());
+  Map* map = first_word.ToMap();
+  map->Scavenge(p, object);
 }
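GetScavengerForSize above hands back a separate template instantiation per object size in words, so within each evacuator the size is a compile-time constant and the copy loop can be unrolled. A sketch of the same specialization trick under simplified assumptions (CopyWords and GetCopierForSize are illustrative names, not V8 functions):

```cpp
#include <cstddef>
#include <cstdint>

// Copy kWords machine words; with kWords a template parameter the
// compiler can fully unroll this loop for each instantiation.
template <int kWords>
static void CopyWords(intptr_t* dst, const intptr_t* src) {
  for (int i = 0; i < kWords; i++) dst[i] = src[i];
}

typedef void (*Copier)(intptr_t*, const intptr_t*);

// One specialized copier per small size, mirroring the CASE(n) table.
static Copier GetCopierForSize(int size_in_words) {
  switch (size_in_words) {
    case 1: return &CopyWords<1>;
    case 2: return &CopyWords<2>;
    case 3: return &CopyWords<3>;
    case 4: return &CopyWords<4>;
    default: return NULL;  // fall back to a generic, size-checked path
  }
}

int main() {
  intptr_t src[4] = {1, 2, 3, 4};
  intptr_t dst[4] = {0, 0, 0, 0};
  Copier copy = GetCopierForSize(4);
  if (copy != NULL) copy(dst, src);
  return static_cast<int>(dst[3] - 4);  // 0 on success
}
```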
@@ -1243,6 +1404,8 @@ Object* Heap::AllocatePartialMap(InstanceType instance_type,
   reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
+  reinterpret_cast<Map*>(result)->
+      set_scavenger(GetScavenger(instance_type, instance_size));
   reinterpret_cast<Map*>(result)->set_inobject_properties(0);
   reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
@@ -1259,6 +1422,7 @@ Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
   Map* map = reinterpret_cast<Map*>(result);
   map->set_map(meta_map());
   map->set_instance_type(instance_type);
+  map->set_scavenger(GetScavenger(instance_type, instance_size));
   map->set_prototype(null_value());
   map->set_constructor(null_value());
   map->set_instance_size(instance_size);
@@ -3691,9 +3855,9 @@ bool Heap::IteratePointersInDirtyMapsRegion(
 }


-void Heap::IterateAndMarkPointersToNewSpace(Address start,
-                                            Address end,
-                                            ObjectSlotCallback callback) {
+void Heap::IterateAndMarkPointersToFromSpace(Address start,
+                                             Address end,
+                                             ObjectSlotCallback callback) {
   Address slot_address = start;
   Page* page = Page::FromAddress(start);

@@ -3701,7 +3865,7 @@ void Heap::IterateAndMarkPointersToNewSpace(Address start,
   while (slot_address < end) {
     Object** slot = reinterpret_cast<Object**>(slot_address);
-    if (Heap::InNewSpace(*slot)) {
+    if (Heap::InFromSpace(*slot)) {
       ASSERT((*slot)->IsHeapObject());
       callback(reinterpret_cast<HeapObject**>(slot));
       if (Heap::InNewSpace(*slot)) {
......
@@ -774,11 +774,12 @@ class Heap : public AllStatic {
                                      DirtyRegionCallback visit_dirty_region,
                                      ObjectSlotCallback callback);

-  // Iterate pointers to new space found in memory interval from start to end.
+  // Iterate pointers to from semispace of new space found in memory interval
+  // from start to end.
   // Update dirty marks for page containing start address.
-  static void IterateAndMarkPointersToNewSpace(Address start,
-                                               Address end,
-                                               ObjectSlotCallback callback);
+  static void IterateAndMarkPointersToFromSpace(Address start,
+                                                Address end,
+                                                ObjectSlotCallback callback);

   // Iterate pointers to new space found in memory interval from start to end.
   // Return true if pointers to new space was found.

@@ -985,6 +986,8 @@ class Heap : public AllStatic {
   static void RecordStats(HeapStats* stats);

+  static Scavenger GetScavenger(int instance_type, int instance_size);
+
   // Copy block of memory from src to dst. Size of block should be aligned
   // by pointer size.
   static inline void CopyBlock(Address dst, Address src, int byte_size);
@@ -1232,17 +1235,7 @@ class Heap : public AllStatic {
     set_instanceof_cache_function(the_hole_value());
   }

-  // Helper function used by CopyObject to copy a source object to an
-  // allocated target object and update the forwarding pointer in the source
-  // object. Returns the target object.
-  static inline HeapObject* MigrateObject(HeapObject* source,
-                                          HeapObject* target,
-                                          int size);
-
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-  // Record the copy of an object in the NewSpace's statistics.
-  static void RecordCopiedObject(HeapObject* obj);
-
   // Record statistics before and after garbage collection.
   static void ReportStatisticsBeforeGC();
   static void ReportStatisticsAfterGC();
......
@@ -2060,6 +2060,23 @@ void ExternalFloatArray::set(int index, float value) {
   ptr[index] = value;
 }

+
+inline Scavenger Map::scavenger() {
+  Scavenger callback = reinterpret_cast<Scavenger>(
+      READ_INTPTR_FIELD(this, kIterateBodyCallbackOffset));
+
+  ASSERT(callback == Heap::GetScavenger(instance_type(),
+                                        instance_size()));
+
+  return callback;
+}
+
+inline void Map::set_scavenger(Scavenger callback) {
+  ASSERT(!reinterpret_cast<Object*>(
+      reinterpret_cast<intptr_t>(callback))->IsHeapObject());
+
+  WRITE_INTPTR_FIELD(this,
+                     kIterateBodyCallbackOffset,
+                     reinterpret_cast<intptr_t>(callback));
+}
+
 int Map::instance_size() {
   return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
......
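Map::set_scavenger above stores the callback through WRITE_INTPTR_FIELD and asserts that its bits do not look like a tagged heap object, so heap iteration cannot mistake the raw function pointer for an object reference. A freestanding sketch of that convention, assuming V8-style low-bit tagging where heap object pointers have the low bit set (Record and Callback are illustrative names):

```cpp
#include <cassert>
#include <cstdint>
#include <cstdio>

typedef void (*Callback)(void);

static void Hello(void) { std::printf("hello\n"); }

struct Record {
  intptr_t raw_field;  // holds a function pointer, stored as an integer

  void set_callback(Callback cb) {
    intptr_t bits = reinterpret_cast<intptr_t>(cb);
    // With 1-bit tagging, heap object pointers have the low bit set;
    // code addresses are word-aligned, so this should never fire.
    assert((bits & 1) == 0);
    raw_field = bits;
  }

  Callback callback() const {
    return reinterpret_cast<Callback>(raw_field);
  }
};

int main() {
  Record r;
  r.set_callback(&Hello);
  r.callback()();  // dispatch through the integer-stored pointer
  return 0;
}
```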
@@ -2190,6 +2190,8 @@ Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
     int new_instance_size = map()->instance_size() - instance_size_delta;
     new_map->set_inobject_properties(0);
     new_map->set_instance_size(new_instance_size);
+    new_map->set_scavenger(Heap::GetScavenger(new_map->instance_type(),
+                                              new_map->instance_size()));
     Heap::CreateFillerObjectAt(this->address() + new_instance_size,
                                instance_size_delta);
   }
......
@@ -2899,6 +2899,7 @@ class Code: public HeapObject {
   DISALLOW_IMPLICIT_CONSTRUCTORS(Code);
 };

+typedef void (*Scavenger)(Map* map, HeapObject** slot, HeapObject* object);

 // All heap objects have a Map that describes their structure.
 // A Map contains information about:

@@ -3100,6 +3101,13 @@ class Map: public HeapObject {
   void MapVerify();
 #endif

+  inline Scavenger scavenger();
+  inline void set_scavenger(Scavenger callback);
+
+  inline void Scavenge(HeapObject** slot, HeapObject* obj) {
+    scavenger()(this, slot, obj);
+  }
+
   static const int kMaxPreAllocatedPropertyFields = 255;

   // Layout description.

@@ -3110,7 +3118,8 @@ class Map: public HeapObject {
   static const int kInstanceDescriptorsOffset =
       kConstructorOffset + kPointerSize;
   static const int kCodeCacheOffset = kInstanceDescriptorsOffset + kPointerSize;
-  static const int kPadStart = kCodeCacheOffset + kPointerSize;
+  static const int kIterateBodyCallbackOffset = kCodeCacheOffset + kPointerSize;
+  static const int kPadStart = kIterateBodyCallbackOffset + kPointerSize;
   static const int kSize = MAP_POINTER_ALIGN(kPadStart);

   // Layout of pointer fields. Heap iteration code relies on them
......
@@ -673,6 +673,14 @@ void Deserializer::ReadObject(int space_number,
     LOG(SnapshotPositionEvent(address, source_->position()));
   }
   ReadChunk(current, limit, space_number, address);
+
+  if (space == Heap::map_space()) {
+    ASSERT(size == Map::kSize);
+    HeapObject* obj = HeapObject::FromAddress(address);
+    Map* map = reinterpret_cast<Map*>(obj);
+    map->set_scavenger(Heap::GetScavenger(map->instance_type(),
+                                          map->instance_size()));
+  }
 }
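The deserializer hunk is needed because a raw function pointer is process-specific and cannot survive in a snapshot, so each Map read back from a snapshot has its scavenger recomputed from the instance type and size it carries. The same pattern in miniature (load_record is a hypothetical stand-in for ReadChunk, not a V8 function):

```cpp
#include <cstdio>
#include <cstring>

typedef int (*Classifier)(int);

static int ClassifyEven(int x) { return x % 2 == 0; }

struct Record {
  int instance_type;    // plain data: survives serialization
  Classifier classify;  // process-specific: must be rebuilt after load
};

// Hypothetical stand-in for ReadChunk: restores only the plain data.
static void load_record(Record* r, const char* bytes) {
  std::memcpy(&r->instance_type, bytes, sizeof(int));
  r->classify = 0;  // a pointer from the old process would be meaningless
}

int main() {
  int type = 7;
  Record r;
  load_record(&r, reinterpret_cast<const char*>(&type));
  // Recompute the derived field, as the deserializer does for Map.
  r.classify = &ClassifyEven;
  std::printf("type %d even? %d\n", r.instance_type,
              r.classify(r.instance_type));
  return 0;
}
```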
......