Commit 28f9412c authored by vegorov@chromium.org

Generalize virtually dispatched scavenger to virtually dispatched specialized visitors.

Review URL: http://codereview.chromium.org/3066044

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5246 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent fb039823
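The heart of the change: previously each Map cached a raw Scavenger function pointer, one word per map, usable only by the scavenger. After this change each Map stores a small visitor_id instead, and any GC phase can keep its own table of specialized callbacks indexed by that id. A minimal self-contained sketch of the idea follows; the names OldMap, NewMap, scavenge_table and marking_table are illustrative, not V8's.

#include <cstdio>

struct HeapObject;  // opaque in this sketch
typedef void (*Callback)(HeapObject*);

static void VisitDataObject(HeapObject*) { std::printf("data\n"); }
static void VisitPointerObject(HeapObject*) { std::printf("pointers\n"); }

// Before: one function pointer per map, hard-wired to scavenging.
struct OldMap { Callback scavenger; };

// After: one byte-sized id per map; every GC phase owns a table
// indexed by that id, so the same id drives scavenging and marking.
enum VisitorId { kVisitDataObject, kVisitPointerObject, kVisitorIdCount };
struct NewMap { unsigned char visitor_id; };

static Callback scavenge_table[kVisitorIdCount] = { &VisitDataObject,
                                                    &VisitPointerObject };
static Callback marking_table[kVisitorIdCount] = { &VisitDataObject,
                                                   &VisitPointerObject };

int main() {
  NewMap map = { kVisitPointerObject };
  scavenge_table[map.visitor_id](nullptr);  // prints "pointers"
  marking_table[map.visitor_id](nullptr);   // prints "pointers"
  return 0;
}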
@@ -84,6 +84,7 @@ SOURCES = {
mark-compact.cc
messages.cc
objects.cc
objects-visiting.cc
oprofile-agent.cc
parser.cc
profile-generator.cc
@@ -190,6 +190,29 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
}
template<typename StaticVisitor>
void RelocInfo::Visit() {
RelocInfo::Mode mode = rmode();
if (mode == RelocInfo::EMBEDDED_OBJECT) {
StaticVisitor::VisitPointer(target_object_address());
} else if (RelocInfo::IsCodeTarget(mode)) {
StaticVisitor::VisitCodeTarget(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
StaticVisitor::VisitExternalReference(target_reference_address());
#ifdef ENABLE_DEBUGGER_SUPPORT
} else if (Debug::has_break_points() &&
((RelocInfo::IsJSReturn(mode) &&
IsPatchedReturnSequence()) ||
(RelocInfo::IsDebugBreakSlot(mode) &&
IsPatchedDebugBreakSlotSequence()))) {
StaticVisitor::VisitDebugTarget(this);
#endif
} else if (mode == RelocInfo::RUNTIME_ENTRY) {
StaticVisitor::VisitRuntimeEntry(this);
}
}
Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
rm_ = no_reg;
imm32_ = immediate;
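For context on why the RelocInfo::Visit added above is templated on StaticVisitor instead of taking an ObjectVisitor*: calls such as StaticVisitor::VisitPointer bind at compile time and can be inlined, whereas the virtual ObjectVisitor path pays an indirect call per pointer. A stripped-down sketch of the two styles; PrintingVisitor and VisitWith are made-up names, not V8 code.

#include <cstdio>

class ObjectVisitor {          // dynamic dispatch: virtual call per pointer
 public:
  virtual ~ObjectVisitor() {}
  virtual void VisitPointer(void** p) = 0;
};

static void VisitWith(ObjectVisitor* v, void** p) { v->VisitPointer(p); }

template<typename StaticVisitor>  // static dispatch: resolved at compile time
static void VisitWith(void** p) { StaticVisitor::VisitPointer(p); }

struct PrintingVisitor {
  static void VisitPointer(void** p) { std::printf("%p\n", *p); }
};

int main() {
  void* slot = nullptr;
  VisitWith<PrintingVisitor>(&slot);  // no vtable lookup, fully inlinable
  return 0;
}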
@@ -235,6 +235,7 @@ class RelocInfo BASE_EMBEDDED {
INLINE(void set_call_object(Object* target));
INLINE(Object** call_object_address());
template<typename StaticVisitor> inline void Visit();
inline void Visit(ObjectVisitor* v);
// Patch the code with some other code.
@@ -36,6 +36,7 @@
#include "global-handles.h"
#include "macro-assembler.h"
#include "natives.h"
#include "objects-visiting.h"
#include "snapshot.h"
#include "stub-cache.h"
@@ -813,9 +814,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
initial_map->set_instance_size(
initial_map->instance_size() + 5 * kPointerSize);
initial_map->set_instance_descriptors(*descriptors);
initial_map->set_scavenger(
Heap::GetScavenger(initial_map->instance_type(),
initial_map->instance_size()));
initial_map->set_visitor_id(StaticVisitorBase::GetVisitorId(*initial_map));
}
{ // -- J S O N
@@ -32,6 +32,7 @@
#include "execution.h"
#include "factory.h"
#include "macro-assembler.h"
#include "objects-visiting.h"
namespace v8 {
namespace internal {
@@ -277,8 +278,7 @@ Handle<Map> Factory::CopyMap(Handle<Map> src,
copy->set_inobject_properties(inobject_properties);
copy->set_unused_property_fields(inobject_properties);
copy->set_instance_size(copy->instance_size() + instance_size_delta);
copy->set_scavenger(Heap::GetScavenger(copy->instance_type(),
copy->instance_size()));
copy->set_visitor_id(StaticVisitorBase::GetVisitorId(*copy));
return copy;
}
@@ -37,6 +37,7 @@
#include "global-handles.h"
#include "mark-compact.h"
#include "natives.h"
#include "objects-visiting.h"
#include "scanner.h"
#include "scopeinfo.h"
#include "snapshot.h"
@@ -1032,6 +1033,17 @@ void Heap::UpdateNewSpaceReferencesInExternalStringTable(
}
class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
public:
static inline void VisitPointer(Object** p) {
Object* object = *p;
if (!Heap::InNewSpace(object)) return;
Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
reinterpret_cast<HeapObject*>(object));
}
};
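NewSpaceScavenger supplies only VisitPointer; it inherits IterateBody from StaticNewSpaceVisitor (defined later in objects-visiting.h), which returns the visited object's size so that DoScavenge below can walk new space linearly. A toy model of that contract, with an invented Object type and visitor names:

#include <cstdio>
#include <vector>

struct Object { int size; };  // size in slots, including this header slot

// CRTP base: dispatches to the derived visitor and reports object size
// so the caller can step to the next object.
template<typename StaticVisitor>
struct LinearVisitorBase {
  static int IterateBody(Object* obj) {
    StaticVisitor::VisitObject(obj);
    return obj->size;
  }
};

struct CountingVisitor : LinearVisitorBase<CountingVisitor> {
  static int visited;
  static void VisitObject(Object*) { visited++; }
};
int CountingVisitor::visited = 0;

int main() {
  // A flat "space" of three objects occupying 2, 3 and 1 slots.
  std::vector<Object> space(6);
  space[0].size = 2;
  space[2].size = 3;
  space[5].size = 1;

  size_t front = 0;  // same shape as the new_space_front loop in DoScavenge
  while (front < space.size()) {
    front += CountingVisitor::IterateBody(&space[front]);
  }
  std::printf("visited %d objects\n", CountingVisitor::visited);  // 3
  return 0;
}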
Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
Address new_space_front) {
do {
@@ -1042,10 +1054,7 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
// queue is empty.
while (new_space_front < new_space_.top()) {
HeapObject* object = HeapObject::FromAddress(new_space_front);
Map* map = object->map();
int size = object->SizeFromMap(map);
object->IterateBody(map->instance_type(), size, scavenge_visitor);
new_space_front += size;
new_space_front += NewSpaceScavenger::IterateBody(object->map(), object);
}
// Promote and process all the to-be-promoted objects.
@@ -1072,315 +1081,231 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
}
enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
enum SizeRestriction { SMALL, UNKNOWN_SIZE };


template<int object_size_in_words, ObjectContents object_contents>
static inline void EvacuateObjectOfFixedSize(Map* map,
                                             HeapObject** slot,
                                             HeapObject* object) {
  const int object_size = object_size_in_words << kPointerSizeLog2;
  EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
}


template<ObjectContents object_contents>
static inline void EvacuateObjectOfFixedSize(Map* map,
                                             HeapObject** slot,
                                             HeapObject* object) {
  int object_size = map->instance_size();
  EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
}


static Scavenger GetScavengerForSize(int object_size,
                                     ObjectContents object_contents) {
  ASSERT(IsAligned(object_size, kPointerSize));
  ASSERT(object_size < Page::kMaxHeapObjectSize);

  switch (object_size >> kPointerSizeLog2) {
#define CASE(n)                                                   \
    case n:                                                       \
      if (object_contents == DATA_OBJECT) {                       \
        return static_cast<Scavenger>(                            \
            &EvacuateObjectOfFixedSize<n, DATA_OBJECT>);          \
      } else {                                                    \
        return static_cast<Scavenger>(                            \
            &EvacuateObjectOfFixedSize<n, POINTER_OBJECT>);       \
      }

    CASE(1);
    CASE(2);
    CASE(3);
    CASE(4);
    CASE(5);
    CASE(6);
    CASE(7);
    CASE(8);
    CASE(9);
    CASE(10);
    CASE(11);
    CASE(12);
    CASE(13);
    CASE(14);
    CASE(15);
    CASE(16);

    default:
      if (object_contents == DATA_OBJECT) {
        return static_cast<Scavenger>(&EvacuateObjectOfFixedSize<DATA_OBJECT>);
      } else {
        return static_cast<Scavenger>(
            &EvacuateObjectOfFixedSize<POINTER_OBJECT>);
      }
  }
#undef CASE
}


Scavenger Heap::GetScavenger(int instance_type, int instance_size) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kAsciiStringTag) {
          return &EvacuateSeqAsciiString;
        } else {
          return &EvacuateSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return &EvacuateShortcutCandidate;
        } else {
          ASSERT(instance_size == ConsString::kSize);
          return GetScavengerForSize(ConsString::kSize, POINTER_OBJECT);
        }

      case kExternalStringTag:
        ASSERT(instance_size == ExternalString::kSize);
        return GetScavengerForSize(ExternalString::kSize, DATA_OBJECT);
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return reinterpret_cast<Scavenger>(&EvacuateByteArray);

    case FIXED_ARRAY_TYPE:
      return reinterpret_cast<Scavenger>(&EvacuateFixedArray);

    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_VALUE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_REGEXP_TYPE:
    case JS_FUNCTION_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_BUILTINS_OBJECT_TYPE:
      return GetScavengerForSize(instance_size, POINTER_OBJECT);

    case ODDBALL_TYPE:
      return NULL;

    case PROXY_TYPE:
      return GetScavengerForSize(Proxy::kSize, DATA_OBJECT);

    case MAP_TYPE:
      return NULL;

    case CODE_TYPE:
      return NULL;

    case JS_GLOBAL_PROPERTY_CELL_TYPE:
      return NULL;

    case HEAP_NUMBER_TYPE:
    case FILLER_TYPE:
    case PIXEL_ARRAY_TYPE:
    case EXTERNAL_BYTE_ARRAY_TYPE:
    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
    case EXTERNAL_SHORT_ARRAY_TYPE:
    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
    case EXTERNAL_INT_ARRAY_TYPE:
    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
    case EXTERNAL_FLOAT_ARRAY_TYPE:
      return GetScavengerForSize(instance_size, DATA_OBJECT);

    case SHARED_FUNCTION_INFO_TYPE:
      return GetScavengerForSize(SharedFunctionInfo::kAlignedSize,
                                 POINTER_OBJECT);

#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      return GetScavengerForSize(instance_size, POINTER_OBJECT);

    default:
      UNREACHABLE();
      return NULL;
  }
}


class ScavengingVisitor : public StaticVisitorBase {
 public:
  static void Initialize() {
    table_.Register(kVisitSeqAsciiString, &EvacuateSeqAsciiString);
    table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
    table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
    table_.Register(kVisitByteArray, &EvacuateByteArray);
    table_.Register(kVisitFixedArray, &EvacuateFixedArray);

    typedef ObjectEvacuationStrategy<POINTER_OBJECT> PointerObject;

    table_.Register(kVisitConsString,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                        VisitSpecialized<ConsString::kSize>);

    table_.Register(kVisitSharedFunctionInfo,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::
                        VisitSpecialized<SharedFunctionInfo::kSize>);

    table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
                                   kVisitDataObject,
                                   kVisitDataObjectGeneric>();

    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
                                   kVisitJSObject,
                                   kVisitJSObjectGeneric>();

    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
                                   kVisitStruct,
                                   kVisitStructGeneric>();
  }


  static inline void Scavenge(Map* map, HeapObject** slot, HeapObject* obj) {
    table_.GetVisitor(map)(map, slot, obj);
  }


 private:
  enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
  enum SizeRestriction { SMALL, UNKNOWN_SIZE };

#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
  static void RecordCopiedObject(HeapObject* obj) {
    bool should_record = false;
#ifdef DEBUG
    should_record = FLAG_heap_stats;
#endif
#ifdef ENABLE_LOGGING_AND_PROFILING
    should_record = should_record || FLAG_log_gc;
#endif
    if (should_record) {
      if (Heap::new_space()->Contains(obj)) {
        Heap::new_space()->RecordAllocation(obj);
      } else {
        Heap::new_space()->RecordPromotion(obj);
      }
    }
  }
#endif  // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)

  // Helper function used by CopyObject to copy a source object to an
  // allocated target object and update the forwarding pointer in the source
  // object.  Returns the target object.
  INLINE(static HeapObject* MigrateObject(HeapObject* source,
                                          HeapObject* target,
                                          int size)) {
    // Copy the content of source to target.
    Heap::CopyBlock(target->address(), source->address(), size);

    // Set the forwarding address.
    source->set_map_word(MapWord::FromForwardingAddress(target));

#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
    // Update NewSpace stats if necessary.
    RecordCopiedObject(target);
#endif
    HEAP_PROFILE(ObjectMoveEvent(source->address(), target->address()));

    return target;
  }


  template<ObjectContents object_contents, SizeRestriction size_restriction>
  static inline void EvacuateObject(Map* map,
                                    HeapObject** slot,
                                    HeapObject* object,
                                    int object_size) {
    ASSERT((size_restriction != SMALL) ||
           (object_size <= Page::kMaxHeapObjectSize));
    ASSERT(object->Size() == object_size);

    if (Heap::ShouldBePromoted(object->address(), object_size)) {
      Object* result;

      if ((size_restriction != SMALL) &&
          (object_size > Page::kMaxHeapObjectSize)) {
        result = Heap::lo_space()->AllocateRawFixedArray(object_size);
      } else {
        if (object_contents == DATA_OBJECT) {
          result = Heap::old_data_space()->AllocateRaw(object_size);
        } else {
          result = Heap::old_pointer_space()->AllocateRaw(object_size);
        }
      }

      if (!result->IsFailure()) {
        HeapObject* target = HeapObject::cast(result);
        *slot = MigrateObject(object, target, object_size);

        if (object_contents == POINTER_OBJECT) {
          promotion_queue.insert(target, object_size);
        }

        Heap::tracer()->increment_promoted_objects_size(object_size);
        return;
      }
    }

    Object* result = Heap::new_space()->AllocateRaw(object_size);
    ASSERT(!result->IsFailure());
    *slot = MigrateObject(object, HeapObject::cast(result), object_size);
    return;
  }


  static inline void EvacuateFixedArray(Map* map,
                                        HeapObject** slot,
                                        HeapObject* object) {
    int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
    EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map,
                                                 slot,
                                                 object,
                                                 object_size);
  }


  static inline void EvacuateByteArray(Map* map,
                                       HeapObject** slot,
                                       HeapObject* object) {
    int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
  }


  static inline void EvacuateSeqAsciiString(Map* map,
                                            HeapObject** slot,
                                            HeapObject* object) {
    int object_size = SeqAsciiString::cast(object)->
        SeqAsciiStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
  }


  static inline void EvacuateSeqTwoByteString(Map* map,
                                              HeapObject** slot,
                                              HeapObject* object) {
    int object_size = SeqTwoByteString::cast(object)->
        SeqTwoByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
  }


  static inline bool IsShortcutCandidate(int type) {
    return ((type & kShortcutTypeMask) == kShortcutTypeTag);
  }


  static inline void EvacuateShortcutCandidate(Map* map,
                                               HeapObject** slot,
                                               HeapObject* object) {
    ASSERT(IsShortcutCandidate(map->instance_type()));

    if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) {
      HeapObject* first =
          HeapObject::cast(ConsString::cast(object)->unchecked_first());

      *slot = first;

      if (!Heap::InNewSpace(first)) {
        object->set_map_word(MapWord::FromForwardingAddress(first));
        return;
      }

      MapWord first_word = first->map_word();
      if (first_word.IsForwardingAddress()) {
        HeapObject* target = first_word.ToForwardingAddress();

        *slot = target;
        object->set_map_word(MapWord::FromForwardingAddress(target));
        return;
      }

      Scavenge(first->map(), slot, first);
      object->set_map_word(MapWord::FromForwardingAddress(*slot));
      return;
    }

    int object_size = ConsString::kSize;
    EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
  }


  template<ObjectContents object_contents>
  class ObjectEvacuationStrategy {
   public:
    template<int object_size>
    static inline void VisitSpecialized(Map* map,
                                        HeapObject** slot,
                                        HeapObject* object) {
      EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
    }

    static inline void Visit(Map* map,
                             HeapObject** slot,
                             HeapObject* object) {
      int object_size = map->instance_size();
      EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
    }
  };

  typedef void (*Callback)(Map* map, HeapObject** slot, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};


VisitorDispatchTable<ScavengingVisitor::Callback> ScavengingVisitor::table_;
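All evacuation funnels through EvacuateObject above: survivors that pass ShouldBePromoted go to an old space chosen by their content kind, or to the large object space when oversized, and everything else is copied within new space. A reduced sketch of just that decision; the constants and the ChooseTarget helper are toy stand-ins, not V8's.

#include <cstdio>

const int kMaxHeapObjectSize = 8 * 1024;  // illustrative page limit

enum Space { NEW_SPACE, OLD_DATA, OLD_POINTER, LARGE_OBJECT };

// Mirrors the branch structure of EvacuateObject: oversized objects go
// to the large object space, other survivors are promoted by content
// kind, and everything else stays in new space.
static Space ChooseTarget(bool should_be_promoted, bool is_data, int size) {
  if (should_be_promoted) {
    if (size > kMaxHeapObjectSize) return LARGE_OBJECT;
    return is_data ? OLD_DATA : OLD_POINTER;
  }
  return NEW_SPACE;
}

int main() {
  std::printf("%d\n", ChooseTarget(true, false, 64));    // OLD_POINTER
  std::printf("%d\n", ChooseTarget(true, true, 16384));  // LARGE_OBJECT
  std::printf("%d\n", ChooseTarget(false, true, 64));    // NEW_SPACE
  return 0;
}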
void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
@@ -1388,7 +1313,7 @@ void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
MapWord first_word = object->map_word();
ASSERT(!first_word.IsForwardingAddress());
Map* map = first_word.ToMap();
map->Scavenge(p, object);
ScavengingVisitor::Scavenge(map, p, object);
}
@@ -1407,7 +1332,8 @@ Object* Heap::AllocatePartialMap(InstanceType instance_type,
reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
reinterpret_cast<Map*>(result)->
set_scavenger(GetScavenger(instance_type, instance_size));
set_visitor_id(
StaticVisitorBase::GetVisitorId(instance_type, instance_size));
reinterpret_cast<Map*>(result)->set_inobject_properties(0);
reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
@@ -1424,7 +1350,8 @@ Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
Map* map = reinterpret_cast<Map*>(result);
map->set_map(meta_map());
map->set_instance_type(instance_type);
map->set_scavenger(GetScavenger(instance_type, instance_size));
map->set_visitor_id(
StaticVisitorBase::GetVisitorId(instance_type, instance_size));
map->set_prototype(null_value());
map->set_constructor(null_value());
map->set_instance_size(instance_size);
@@ -4197,6 +4124,10 @@ bool Heap::Setup(bool create_heap_objects) {
if (!ConfigureHeapDefault()) return false;
}
ScavengingVisitor::Initialize();
NewSpaceScavenger::Initialize();
MarkCompactCollector::Initialize();
// Setup memory allocator and reserve a chunk of memory for new
// space. The chunk is double the size of the requested reserved
// new space size to ensure that we can find a pair of semispaces that
@@ -4881,6 +4812,7 @@ GCTracer::~GCTracer() {
PrintF("external=%d ", static_cast<int>(scopes_[Scope::EXTERNAL]));
PrintF("mark=%d ", static_cast<int>(scopes_[Scope::MC_MARK]));
PrintF("sweep=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP]));
PrintF("sweepns=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP_NEWSPACE]));
PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));
PrintF("flushcode=%d ", static_cast<int>(scopes_[Scope::MC_FLUSH_CODE]));
@@ -983,8 +983,6 @@ class Heap : public AllStatic {
static void RecordStats(HeapStats* stats, bool take_snapshot = false);
static Scavenger GetScavenger(int instance_type, int instance_size);
// Copy block of memory from src to dst. Size of block should be aligned
// by pointer size.
static inline void CopyBlock(Address dst, Address src, int byte_size);
@@ -1725,6 +1723,7 @@ class GCTracer BASE_EMBEDDED {
EXTERNAL,
MC_MARK,
MC_SWEEP,
MC_SWEEP_NEWSPACE,
MC_COMPACT,
MC_FLUSH_CODE,
kNumberOfScopes
@@ -183,6 +183,30 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
}
template<typename StaticVisitor>
void RelocInfo::Visit() {
RelocInfo::Mode mode = rmode();
if (mode == RelocInfo::EMBEDDED_OBJECT) {
StaticVisitor::VisitPointer(target_object_address());
} else if (RelocInfo::IsCodeTarget(mode)) {
StaticVisitor::VisitCodeTarget(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
StaticVisitor::VisitExternalReference(target_reference_address());
#ifdef ENABLE_DEBUGGER_SUPPORT
} else if (Debug::has_break_points() &&
((RelocInfo::IsJSReturn(mode) &&
IsPatchedReturnSequence()) ||
(RelocInfo::IsDebugBreakSlot(mode) &&
IsPatchedDebugBreakSlotSequence()))) {
StaticVisitor::VisitDebugTarget(this);
#endif
} else if (mode == RelocInfo::RUNTIME_ENTRY) {
StaticVisitor::VisitRuntimeEntry(this);
}
}
Immediate::Immediate(int x) {
x_ = x;
rmode_ = RelocInfo::NONE;
@@ -32,6 +32,7 @@
#include "global-handles.h"
#include "ic-inl.h"
#include "mark-compact.h"
#include "objects-visiting.h"
#include "stub-cache.h"
namespace v8 {
@@ -63,6 +64,7 @@ int MarkCompactCollector::live_cell_objects_size_ = 0;
int MarkCompactCollector::live_lo_objects_size_ = 0;
#endif
void MarkCompactCollector::CollectGarbage() {
// Make sure that Prepare() has been called. The individual steps below will
// update the state as they proceed.
@@ -244,14 +246,72 @@ static inline HeapObject* ShortCircuitConsString(Object** p) {
}
// Helper class for marking pointers in HeapObjects.
class StaticMarkingVisitor : public StaticVisitorBase {
 public:
  static inline void IterateBody(Map* map, HeapObject* obj) {
    table_.GetVisitor(map)(map, obj);
  }
static void Initialize() {
table_.Register(kVisitShortcutCandidate,
&FixedBodyVisitor<StaticMarkingVisitor,
ConsString::BodyDescriptor,
void>::Visit);
table_.Register(kVisitConsString,
&FixedBodyVisitor<StaticMarkingVisitor,
ConsString::BodyDescriptor,
void>::Visit);
table_.Register(kVisitFixedArray,
&FlexibleBodyVisitor<StaticMarkingVisitor,
FixedArray::BodyDescriptor,
void>::Visit);
table_.Register(kVisitSharedFunctionInfo,
&FixedBodyVisitor<StaticMarkingVisitor,
SharedFunctionInfo::BodyDescriptor,
void>::Visit);
table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
table_.Register(kVisitOddball,
&FixedBodyVisitor<StaticMarkingVisitor,
Oddball::BodyDescriptor,
void>::Visit);
table_.Register(kVisitMap,
&FixedBodyVisitor<StaticMarkingVisitor,
Map::BodyDescriptor,
void>::Visit);
table_.Register(kVisitCode, &VisitCode);
table_.Register(kVisitPropertyCell,
&FixedBodyVisitor<StaticMarkingVisitor,
JSGlobalPropertyCell::BodyDescriptor,
void>::Visit);
table_.RegisterSpecializations<DataObjectVisitor,
kVisitDataObject,
kVisitDataObjectGeneric>();
table_.RegisterSpecializations<JSObjectVisitor,
kVisitJSObject,
kVisitJSObjectGeneric>();
table_.RegisterSpecializations<StructObjectVisitor,
kVisitStruct,
kVisitStructGeneric>();
}
INLINE(static void VisitPointer(Object** p)) {
MarkObjectByPointer(p);
}
void VisitPointers(Object** start, Object** end) {
INLINE(static void VisitPointers(Object** start, Object** end)) {
// Mark all objects pointed to in [start, end).
const int kMinRangeForMarkingRecursion = 64;
if (end - start >= kMinRangeForMarkingRecursion) {
@@ -261,7 +321,7 @@ class MarkingVisitor : public ObjectVisitor {
for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
}
void VisitCodeTarget(RelocInfo* rinfo) {
static inline void VisitCodeTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address());
if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) {
@@ -273,7 +333,7 @@ class MarkingVisitor : public ObjectVisitor {
}
}
void VisitDebugTarget(RelocInfo* rinfo) {
static inline void VisitDebugTarget(RelocInfo* rinfo) {
ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
rinfo->IsPatchedReturnSequence()) ||
(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
@@ -282,19 +342,15 @@ class MarkingVisitor : public ObjectVisitor {
MarkCompactCollector::MarkObject(code);
}
private:
// Mark object pointed to by p.
void MarkObjectByPointer(Object** p) {
INLINE(static void MarkObjectByPointer(Object** p)) {
if (!(*p)->IsHeapObject()) return;
HeapObject* object = ShortCircuitConsString(p);
MarkCompactCollector::MarkObject(object);
}
// Tells whether the mark sweep collection will perform compaction.
bool IsCompacting() { return MarkCompactCollector::IsCompacting(); }
// Visit an unmarked object.
void VisitUnmarkedObject(HeapObject* obj) {
static inline void VisitUnmarkedObject(HeapObject* obj) {
#ifdef DEBUG
ASSERT(Heap::Contains(obj));
ASSERT(!obj->IsMarked());
@@ -303,12 +359,12 @@ class MarkingVisitor : public ObjectVisitor {
MarkCompactCollector::SetMark(obj);
// Mark the map pointer and the body.
MarkCompactCollector::MarkObject(map);
obj->IterateBody(map->instance_type(), obj->SizeFromMap(map), this);
IterateBody(map, obj);
}
// Visit all unmarked objects pointed to by [start, end).
// Returns false if the operation fails (lack of stack space).
inline bool VisitUnmarkedObjects(Object** start, Object** end) {
static inline bool VisitUnmarkedObjects(Object** start, Object** end) {
// Return false if we are close to the stack limit.
StackLimitCheck check;
if (check.HasOverflowed()) return false;
@@ -322,6 +378,60 @@ class MarkingVisitor : public ObjectVisitor {
}
return true;
}
static inline void VisitExternalReference(Address* p) { }
static inline void VisitRuntimeEntry(RelocInfo* rinfo) { }
private:
class DataObjectVisitor {
public:
template<int size>
static void VisitSpecialized(Map* map, HeapObject* object) {
}
static void Visit(Map* map, HeapObject* object) {
}
};
typedef FlexibleBodyVisitor<StaticMarkingVisitor,
JSObject::BodyDescriptor,
void> JSObjectVisitor;
typedef FlexibleBodyVisitor<StaticMarkingVisitor,
StructBodyDescriptor,
void> StructObjectVisitor;
static void VisitCode(Map* map, HeapObject* object) {
reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>();
}
typedef void (*Callback)(Map* map, HeapObject* object);
static VisitorDispatchTable<Callback> table_;
};
VisitorDispatchTable<StaticMarkingVisitor::Callback>
StaticMarkingVisitor::table_;
class MarkingVisitor : public ObjectVisitor {
public:
void VisitPointer(Object** p) {
StaticMarkingVisitor::VisitPointer(p);
}
void VisitPointers(Object** start, Object** end) {
StaticMarkingVisitor::VisitPointers(start, end);
}
void VisitCodeTarget(RelocInfo* rinfo) {
StaticMarkingVisitor::VisitCodeTarget(rinfo);
}
void VisitDebugTarget(RelocInfo* rinfo) {
StaticMarkingVisitor::VisitDebugTarget(rinfo);
}
};
@@ -336,11 +446,7 @@ class RootMarkingVisitor : public ObjectVisitor {
for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
}
MarkingVisitor* stack_visitor() { return &stack_visitor_; }
private:
MarkingVisitor stack_visitor_;
void MarkObjectByPointer(Object** p) {
if (!(*p)->IsHeapObject()) return;
@@ -351,14 +457,14 @@ class RootMarkingVisitor : public ObjectVisitor {
Map* map = object->map();
// Mark the object.
MarkCompactCollector::SetMark(object);
// Mark the map pointer and body, and push them on the marking stack.
MarkCompactCollector::MarkObject(map);
object->IterateBody(map->instance_type(), object->SizeFromMap(map),
&stack_visitor_);
StaticMarkingVisitor::IterateBody(map, object);
// Mark all the objects reachable from the map and body. May leave
// overflowed objects in the heap.
MarkCompactCollector::EmptyMarkingStack(&stack_visitor_);
MarkCompactCollector::EmptyMarkingStack();
}
};
@@ -425,11 +531,12 @@ void MarkCompactCollector::MarkMapContents(Map* map) {
// Mark the Object* fields of the Map.
// Since the descriptor array has been marked already, it is fine
// that one of these fields contains a pointer to it.
MarkingVisitor visitor; // Has no state or contents.
visitor.VisitPointers(HeapObject::RawField(map,
Map::kPointerFieldsBeginOffset),
HeapObject::RawField(map,
Map::kPointerFieldsEndOffset));
Object** start_slot = HeapObject::RawField(map,
Map::kPointerFieldsBeginOffset);
Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
StaticMarkingVisitor::VisitPointers(start_slot, end_slot);
}
@@ -529,7 +636,7 @@ void MarkCompactCollector::MarkSymbolTable() {
// Explicitly mark the prefix.
MarkingVisitor marker;
symbol_table->IteratePrefix(&marker);
ProcessMarkingStack(&marker);
ProcessMarkingStack();
}
@@ -544,7 +651,7 @@ void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
// There may be overflowed objects in the heap. Visit them now.
while (marking_stack.overflowed()) {
RefillMarkingStack();
EmptyMarkingStack(visitor->stack_visitor());
EmptyMarkingStack();
}
}
@@ -587,7 +694,7 @@ void MarkCompactCollector::MarkObjectGroups() {
// Before: the marking stack contains zero or more heap object pointers.
// After: the marking stack is empty, and all objects reachable from the
// marking stack have been marked, or are overflowed in the heap.
void MarkCompactCollector::EmptyMarkingStack(MarkingVisitor* visitor) {
void MarkCompactCollector::EmptyMarkingStack() {
while (!marking_stack.is_empty()) {
HeapObject* object = marking_stack.Pop();
ASSERT(object->IsHeapObject());
@@ -601,8 +708,8 @@ void MarkCompactCollector::EmptyMarkingStack(MarkingVisitor* visitor) {
map_word.ClearMark();
Map* map = map_word.ToMap();
MarkObject(map);
object->IterateBody(map->instance_type(), object->SizeFromMap(map),
visitor);
StaticMarkingVisitor::IterateBody(map, object);
}
}
@@ -652,22 +759,22 @@ void MarkCompactCollector::RefillMarkingStack() {
// stack. Before: the marking stack contains zero or more heap object
// pointers. After: the marking stack is empty and there are no overflowed
// objects in the heap.
void MarkCompactCollector::ProcessMarkingStack(MarkingVisitor* visitor) {
EmptyMarkingStack(visitor);
void MarkCompactCollector::ProcessMarkingStack() {
EmptyMarkingStack();
while (marking_stack.overflowed()) {
RefillMarkingStack();
EmptyMarkingStack(visitor);
EmptyMarkingStack();
}
}
void MarkCompactCollector::ProcessObjectGroups(MarkingVisitor* visitor) {
void MarkCompactCollector::ProcessObjectGroups() {
bool work_to_do = true;
ASSERT(marking_stack.is_empty());
while (work_to_do) {
MarkObjectGroups();
work_to_do = !marking_stack.is_empty();
ProcessMarkingStack(visitor);
ProcessMarkingStack();
}
}
@@ -692,7 +799,7 @@ void MarkCompactCollector::MarkLiveObjects() {
// objects are unmarked. Mark objects reachable from object groups
// containing at least one marked object, and continue until no new
// objects are reachable from the object groups.
ProcessObjectGroups(root_visitor.stack_visitor());
ProcessObjectGroups();
// The objects reachable from the roots or object groups are marked,
// yet unreachable objects are unmarked. Mark objects reachable
@@ -705,12 +812,12 @@ void MarkCompactCollector::MarkLiveObjects() {
GlobalHandles::IterateWeakRoots(&root_visitor);
while (marking_stack.overflowed()) {
RefillMarkingStack();
EmptyMarkingStack(root_visitor.stack_visitor());
EmptyMarkingStack();
}
// Repeat the object groups to mark unmarked groups reachable from the
// weak roots.
ProcessObjectGroups(root_visitor.stack_visitor());
ProcessObjectGroups();
// Prune the symbol table removing all symbols only pointed to by the
// symbol table. Cannot use symbol_table() here because the symbol
@@ -1091,16 +1198,35 @@ static void MigrateObject(Address dst,
}
class StaticPointersToNewGenUpdatingVisitor : public
StaticNewSpaceVisitor<StaticPointersToNewGenUpdatingVisitor> {
public:
static inline void VisitPointer(Object** p) {
if (!(*p)->IsHeapObject()) return;
HeapObject* obj = HeapObject::cast(*p);
Address old_addr = obj->address();
if (Heap::new_space()->Contains(obj)) {
ASSERT(Heap::InFromSpace(*p));
*p = HeapObject::FromAddress(Memory::Address_at(old_addr));
}
}
};
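This works because, by the time SweepNewSpace runs this visitor, every live new-space object's new address has been recorded at its old address (read back via Memory::Address_at(old_addr)). A toy model of slot updating through such a forwarding address; the Node type is invented.

#include <cstdio>

struct Node {
  Node* forwarding;  // where the object moved, or null if it did not move
  int payload;
};

static void UpdateSlot(Node** slot) {
  if (*slot != nullptr && (*slot)->forwarding != nullptr) {
    *slot = (*slot)->forwarding;  // redirect to the relocated copy
  }
}

int main() {
  Node moved = { nullptr, 42 };    // the object's new location
  Node original = { &moved, 42 };  // old location, now just a tombstone
  Node* slot = &original;
  UpdateSlot(&slot);
  std::printf("%d %d\n", slot == &moved, slot->payload);  // prints "1 42"
  return 0;
}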
// Visitor for updating pointers from live objects in old spaces to new space.
// It does not expect to encounter pointers to dead objects.
class PointersToNewGenUpdatingVisitor: public ObjectVisitor {
public:
void VisitPointer(Object** p) {
UpdatePointer(p);
StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
}
void VisitPointers(Object** start, Object** end) {
for (Object** p = start; p < end; p++) UpdatePointer(p);
for (Object** p = start; p < end; p++) {
StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
}
}
void VisitCodeTarget(RelocInfo* rinfo) {
@@ -1119,19 +1245,6 @@ class PointersToNewGenUpdatingVisitor: public ObjectVisitor {
VisitPointer(&target);
rinfo->set_call_address(Code::cast(target)->instruction_start());
}
private:
void UpdatePointer(Object** p) {
if (!(*p)->IsHeapObject()) return;
HeapObject* obj = HeapObject::cast(*p);
Address old_addr = obj->address();
if (Heap::new_space()->Contains(obj)) {
ASSERT(Heap::InFromSpace(*p));
*p = HeapObject::FromAddress(Memory::Address_at(old_addr));
}
}
};
@@ -1248,15 +1361,12 @@ static void SweepNewSpace(NewSpace* space) {
PointersToNewGenUpdatingVisitor updating_visitor;
// Update pointers in to space.
HeapObject* object;
for (Address current = space->bottom();
current < space->top();
current += object->Size()) {
object = HeapObject::FromAddress(current);
object->IterateBody(object->map()->instance_type(),
object->Size(),
&updating_visitor);
Address current = space->bottom();
while (current < space->top()) {
HeapObject* object = HeapObject::FromAddress(current);
current +=
StaticPointersToNewGenUpdatingVisitor::IterateBody(object->map(),
object);
}
// Update roots.
@@ -1758,7 +1868,9 @@ void MarkCompactCollector::SweepSpaces() {
SweepSpace(Heap::old_data_space(), &DeallocateOldDataBlock);
SweepSpace(Heap::code_space(), &DeallocateCodeBlock);
SweepSpace(Heap::cell_space(), &DeallocateCellBlock);
SweepNewSpace(Heap::new_space());
{ GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
SweepNewSpace(Heap::new_space());
}
SweepSpace(Heap::map_space(), &DeallocateMapBlock);
Heap::IterateDirtyRegions(Heap::map_space(),
@@ -2327,4 +2439,11 @@ void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) {
#endif
}
void MarkCompactCollector::Initialize() {
StaticPointersToNewGenUpdatingVisitor::Initialize();
StaticMarkingVisitor::Initialize();
}
} } // namespace v8::internal
@@ -86,6 +86,9 @@ class MarkCompactCollector: public AllStatic {
force_compaction_ = value;
}
static void Initialize();
// Prepares for GC by resetting relocation info in old and map spaces and
// choosing spaces to compact.
static void Prepare(GCTracer* tracer);
@@ -171,6 +174,7 @@ class MarkCompactCollector: public AllStatic {
friend class RootMarkingVisitor;
friend class MarkingVisitor;
friend class StaticMarkingVisitor;
// Marking operations for objects reachable from roots.
static void MarkLiveObjects();
@@ -214,17 +218,17 @@ class MarkCompactCollector: public AllStatic {
// Mark all objects in an object group with at least one marked
// object, then all objects reachable from marked objects in object
// groups, and repeat.
static void ProcessObjectGroups(MarkingVisitor* visitor);
static void ProcessObjectGroups();
// Mark objects reachable (transitively) from objects in the marking stack
// or overflowed in the heap.
static void ProcessMarkingStack(MarkingVisitor* visitor);
static void ProcessMarkingStack();
// Mark objects reachable (transitively) from objects in the marking
// stack. This function empties the marking stack, but may leave
// overflowed objects in the heap, in which case the marking stack's
// overflow flag will be set.
static void EmptyMarkingStack(MarkingVisitor* visitor);
static void EmptyMarkingStack();
// Refill the marking stack with overflowed objects from the heap. This
// function either leaves the marking stack full or clears the overflow
@@ -2060,21 +2060,8 @@ void ExternalFloatArray::set(int index, float value) {
ptr[index] = value;
}
inline Scavenger Map::scavenger() {
Scavenger callback = reinterpret_cast<Scavenger>(
READ_INTPTR_FIELD(this, kScavengerCallbackOffset));
ASSERT(callback == Heap::GetScavenger(instance_type(),
instance_size()));
return callback;
}
inline void Map::set_scavenger(Scavenger callback) {
WRITE_INTPTR_FIELD(this,
kScavengerCallbackOffset,
reinterpret_cast<intptr_t>(callback));
}
INT_ACCESSORS(Map, visitor_id, kScavengerCallbackOffset)
int Map::instance_size() {
return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
@@ -2099,7 +2086,7 @@ int HeapObject::SizeFromMap(Map* map) {
(kStringTag | kConsStringTag) ||
instance_type == JS_ARRAY_TYPE) return map->instance_size();
if (instance_type == FIXED_ARRAY_TYPE) {
return reinterpret_cast<FixedArray*>(this)->FixedArraySize();
return FixedArray::BodyDescriptor::SizeOf(map, this);
}
if (instance_type == BYTE_ARRAY_TYPE) {
return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
@@ -2819,12 +2806,6 @@ void Proxy::set_proxy(Address value) {
}
void Proxy::ProxyIterateBody(ObjectVisitor* visitor) {
visitor->VisitExternalReference(
reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
}
ACCESSORS(JSValue, value, Object, kValueOffset)
@@ -3308,6 +3289,74 @@ Object* FixedArray::Copy() {
}
int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
return map->instance_size();
}
void Proxy::ProxyIterateBody(ObjectVisitor* v) {
v->VisitExternalReference(
reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
}
template<typename StaticVisitor>
void Proxy::ProxyIterateBody() {
StaticVisitor::VisitExternalReference(
reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
}
void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
typedef v8::String::ExternalAsciiStringResource Resource;
v->VisitExternalAsciiString(
reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
typedef v8::String::ExternalAsciiStringResource Resource;
StaticVisitor::VisitExternalAsciiString(
reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
typedef v8::String::ExternalStringResource Resource;
v->VisitExternalTwoByteString(
reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
typedef v8::String::ExternalStringResource Resource;
StaticVisitor::VisitExternalTwoByteString(
reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
#define SLOT_ADDR(obj, offset) \
reinterpret_cast<Object**>((obj)->address() + offset)
template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
HeapObject* obj,
ObjectVisitor* v) {
v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
}
template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
int object_size,
ObjectVisitor* v) {
v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
}
#undef SLOT_ADDR
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef SMI_ACCESSORS
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "ic-inl.h"
#include "objects-visiting.h"
namespace v8 {
namespace internal {
static inline bool IsShortcutCandidate(int type) {
return ((type & kShortcutTypeMask) == kShortcutTypeTag);
}
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
int instance_type,
int instance_size) {
if (instance_type < FIRST_NONSTRING_TYPE) {
switch (instance_type & kStringRepresentationMask) {
case kSeqStringTag:
if ((instance_type & kStringEncodingMask) == kAsciiStringTag) {
return kVisitSeqAsciiString;
} else {
return kVisitSeqTwoByteString;
}
case kConsStringTag:
if (IsShortcutCandidate(instance_type)) {
return kVisitShortcutCandidate;
} else {
return kVisitConsString;
}
case kExternalStringTag:
return GetVisitorIdForSize(kVisitDataObject,
kVisitDataObjectGeneric,
ExternalString::kSize);
}
UNREACHABLE();
}
switch (instance_type) {
case BYTE_ARRAY_TYPE:
return kVisitByteArray;
case FIXED_ARRAY_TYPE:
return kVisitFixedArray;
case ODDBALL_TYPE:
return kVisitOddball;
case MAP_TYPE:
return kVisitMap;
case CODE_TYPE:
return kVisitCode;
case JS_GLOBAL_PROPERTY_CELL_TYPE:
return kVisitPropertyCell;
case SHARED_FUNCTION_INFO_TYPE:
return kVisitSharedFunctionInfo;
case PROXY_TYPE:
return GetVisitorIdForSize(kVisitDataObject,
kVisitDataObjectGeneric,
Proxy::kSize);
case FILLER_TYPE:
return kVisitDataObjectGeneric;
case JS_OBJECT_TYPE:
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
case JS_VALUE_TYPE:
case JS_ARRAY_TYPE:
case JS_REGEXP_TYPE:
case JS_FUNCTION_TYPE:
case JS_GLOBAL_PROXY_TYPE:
case JS_GLOBAL_OBJECT_TYPE:
case JS_BUILTINS_OBJECT_TYPE:
return GetVisitorIdForSize(kVisitJSObject,
kVisitJSObjectGeneric,
instance_size);
case HEAP_NUMBER_TYPE:
case PIXEL_ARRAY_TYPE:
case EXTERNAL_BYTE_ARRAY_TYPE:
case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
case EXTERNAL_SHORT_ARRAY_TYPE:
case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
case EXTERNAL_INT_ARRAY_TYPE:
case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
case EXTERNAL_FLOAT_ARRAY_TYPE:
return GetVisitorIdForSize(kVisitDataObject,
kVisitDataObjectGeneric,
instance_size);
#define MAKE_STRUCT_CASE(NAME, Name, name) \
case NAME##_TYPE:
STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
return GetVisitorIdForSize(kVisitStruct,
kVisitStructGeneric,
instance_size);
default:
UNREACHABLE();
return kVisitorIdCount;
}
}
} } // namespace v8::internal
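Reading the switch above together with GetVisitorIdForSize makes the mapping concrete: a sequential ASCII string is handled by kVisitSeqAsciiString; a Proxy, which holds no tagged pointers, gets the data-object visitor specialized for Proxy::kSize; and a JSObject with a 5-word instance size resolves to kVisitJSObject5, while one of 16 words exceeds the last specialization (9 words) and falls back to kVisitJSObjectGeneric.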
// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef V8_OBJECTS_ITERATION_H_
#define V8_OBJECTS_ITERATION_H_
// This file provides base classes and auxiliary methods for defining
// static object visitors used during GC.
// Visiting a HeapObject body with a normal ObjectVisitor requires two
// switches on the object's instance type to determine object size and
// layout, plus one or more virtual method calls on the visitor itself.
// A static visitor is different: it provides a dispatch table containing
// pointers to specialized visit functions. Each map has a visitor_id
// field which holds the index of the specialized visitor to use.
namespace v8 {
namespace internal {
// Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
public:
enum VisitorId {
kVisitSeqAsciiString = 0,
kVisitSeqTwoByteString,
kVisitShortcutCandidate,
kVisitByteArray,
kVisitFixedArray,
    // For data objects, JS objects and structs, along with a generic visitor
    // that can visit an object of any size, we provide visitors specialized
    // by object size in words.
    // Ids of specialized visitors are declared in a linear order (without
    // holes), starting from the id of the visitor specialized for 2-word
    // objects (the base visitor id) and ending with the id of the generic
    // visitor.
    // The method GetVisitorIdForSize depends on this ordering to calculate
    // the id of a specialized visitor from a given instance size, the base
    // visitor id and the generic visitor's id.
kVisitDataObject,
kVisitDataObject2 = kVisitDataObject,
kVisitDataObject3,
kVisitDataObject4,
kVisitDataObject5,
kVisitDataObject6,
kVisitDataObject7,
kVisitDataObject8,
kVisitDataObject9,
kVisitDataObjectGeneric,
kVisitJSObject,
kVisitJSObject2 = kVisitJSObject,
kVisitJSObject3,
kVisitJSObject4,
kVisitJSObject5,
kVisitJSObject6,
kVisitJSObject7,
kVisitJSObject8,
kVisitJSObject9,
kVisitJSObjectGeneric,
kVisitStruct,
kVisitStruct2 = kVisitStruct,
kVisitStruct3,
kVisitStruct4,
kVisitStruct5,
kVisitStruct6,
kVisitStruct7,
kVisitStruct8,
kVisitStruct9,
kVisitStructGeneric,
kVisitConsString,
kVisitOddball,
kVisitCode,
kVisitMap,
kVisitPropertyCell,
kVisitSharedFunctionInfo,
kVisitorIdCount,
kMinObjectSizeInWords = 2
};
  // Determine which specialized visitor should be used for a given instance
  // type and instance size.
static VisitorId GetVisitorId(int instance_type, int instance_size);
static VisitorId GetVisitorId(Map* map) {
return GetVisitorId(map->instance_type(), map->instance_size());
}
  // For visitors that allow specialization by size, calculate the VisitorId
  // based on the object size, the base visitor id and the generic visitor id.
static VisitorId GetVisitorIdForSize(VisitorId base,
VisitorId generic,
int object_size) {
ASSERT((base == kVisitDataObject) ||
(base == kVisitStruct) ||
(base == kVisitJSObject));
ASSERT(IsAligned(object_size, kPointerSize));
ASSERT(kMinObjectSizeInWords * kPointerSize <= object_size);
ASSERT(object_size < Page::kMaxHeapObjectSize);
const VisitorId specialization = static_cast<VisitorId>(
base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);
return Min(specialization, generic);
}
};
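The arithmetic in GetVisitorIdForSize can be checked in isolation: the specialized id is base + size_in_words - kMinObjectSizeInWords, clamped to the generic id. A self-contained replica of just that computation, with the enumerators abbreviated to the JS-object family:

#include <algorithm>
#include <cstdio>

enum VisitorId {
  kVisitJSObject,  // == kVisitJSObject2, the 2-word specialization
  kVisitJSObject3, kVisitJSObject4, kVisitJSObject5, kVisitJSObject6,
  kVisitJSObject7, kVisitJSObject8, kVisitJSObject9,
  kVisitJSObjectGeneric,
  kMinObjectSizeInWords = 2
};

static VisitorId ForSize(VisitorId base, VisitorId generic,
                         int size_in_words) {
  VisitorId specialization =
      static_cast<VisitorId>(base + size_in_words - kMinObjectSizeInWords);
  return std::min(specialization, generic);
}

int main() {
  // A 5-word object lands on the 5-word specialization...
  std::printf("%d\n", ForSize(kVisitJSObject, kVisitJSObjectGeneric, 5)
                          == kVisitJSObject5);        // prints 1
  // ...while anything past 9 words falls back to the generic visitor.
  std::printf("%d\n", ForSize(kVisitJSObject, kVisitJSObjectGeneric, 16)
                          == kVisitJSObjectGeneric);  // prints 1
  return 0;
}

On a 32-bit target a 5-word object is 20 bytes, so the header's GetVisitorIdForSize(base, generic, 20) selects the same slot.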
template<typename Callback>
class VisitorDispatchTable {
public:
inline Callback GetVisitor(Map* map) {
return callbacks_[map->visitor_id()];
}
void Register(StaticVisitorBase::VisitorId id, Callback callback) {
ASSERT((0 <= id) && (id < StaticVisitorBase::kVisitorIdCount));
callbacks_[id] = callback;
}
template<typename Visitor,
StaticVisitorBase::VisitorId base,
StaticVisitorBase::VisitorId generic,
int object_size_in_words>
void RegisterSpecialization() {
static const int size = object_size_in_words * kPointerSize;
Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size),
&Visitor::template VisitSpecialized<size>);
}
template<typename Visitor,
StaticVisitorBase::VisitorId base,
StaticVisitorBase::VisitorId generic>
void RegisterSpecializations() {
STATIC_ASSERT(
(generic - base + StaticVisitorBase::kMinObjectSizeInWords) == 10);
RegisterSpecialization<Visitor, base, generic, 2>();
RegisterSpecialization<Visitor, base, generic, 3>();
RegisterSpecialization<Visitor, base, generic, 4>();
RegisterSpecialization<Visitor, base, generic, 5>();
RegisterSpecialization<Visitor, base, generic, 6>();
RegisterSpecialization<Visitor, base, generic, 7>();
RegisterSpecialization<Visitor, base, generic, 8>();
RegisterSpecialization<Visitor, base, generic, 9>();
Register(generic, &Visitor::Visit);
}
private:
Callback callbacks_[StaticVisitorBase::kVisitorIdCount];
};
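Usage of the table is two-phase: a GC phase registers one callback per VisitorId at startup, then dispatches through GetVisitor on each object's map. A self-contained miniature of that flow; the Map, HeapObject and id definitions here are stand-ins for V8's:

#include <cstdio>

enum { kVisitByteArray, kVisitFixedArray, kVisitorIdCount };

struct Map { int visitor_id; };
struct HeapObject { Map* map; };

typedef int (*Callback)(Map* map, HeapObject* object);

template<typename C>
class VisitorDispatchTable {
 public:
  C GetVisitor(Map* map) { return callbacks_[map->visitor_id]; }
  void Register(int id, C callback) { callbacks_[id] = callback; }
 private:
  C callbacks_[kVisitorIdCount];
};

static int VisitByteArray(Map*, HeapObject*) { return 8; }
static int VisitFixedArray(Map*, HeapObject*) { return 24; }

int main() {
  VisitorDispatchTable<Callback> table;
  table.Register(kVisitByteArray, &VisitByteArray);
  table.Register(kVisitFixedArray, &VisitFixedArray);

  Map fixed_array_map = { kVisitFixedArray };
  HeapObject obj = { &fixed_array_map };
  // One array load replaces the instance-type switch.
  std::printf("%d\n", table.GetVisitor(obj.map)(obj.map, &obj));  // 24
  return 0;
}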
template<typename StaticVisitor>
class BodyVisitorBase : public AllStatic {
public:
static inline void IteratePointers(HeapObject* object,
int start_offset,
int end_offset) {
Object** start_slot = reinterpret_cast<Object**>(object->address() +
start_offset);
Object** end_slot = reinterpret_cast<Object**>(object->address() +
end_offset);
StaticVisitor::VisitPointers(start_slot, end_slot);
}
};
template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
public:
static inline ReturnType Visit(Map* map, HeapObject* object) {
int object_size = BodyDescriptor::SizeOf(map, object);
IteratePointers(object, BodyDescriptor::kStartOffset, object_size);
return static_cast<ReturnType>(object_size);
}
template<int object_size>
static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
IteratePointers(object, BodyDescriptor::kStartOffset, object_size);
return static_cast<ReturnType>(object_size);
}
};
template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
public:
static inline ReturnType Visit(Map* map, HeapObject* object) {
IteratePointers(object,
BodyDescriptor::kStartOffset,
BodyDescriptor::kEndOffset);
return static_cast<ReturnType>(BodyDescriptor::kSize);
}
};
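The split between the two classes mirrors the two kinds of BodyDescriptor: a fixed descriptor exposes compile-time kStartOffset, kEndOffset and kSize, while a flexible one exposes kStartOffset plus a per-object SizeOf. A minimal sketch of that contract; the descriptor names are invented, and SizeOf is simplified to take a length instead of V8's (Map*, HeapObject*) pair:

#include <cstdio>

// Shape known completely at compile time (cf. ConsString, Oddball, Map).
struct FixedDescriptor {
  static const int kStartOffset = 4;
  static const int kEndOffset = 12;
  static const int kSize = 12;
};

// Start offset fixed, total size computed per object (cf. FixedArray).
struct FlexibleDescriptor {
  static const int kStartOffset = 8;
  static int SizeOf(int length) { return kStartOffset + length * 4; }
};

int main() {
  // A FixedBodyVisitor would visit slots in [4, 12) and return 12.
  std::printf("fixed: [%d, %d) -> %d\n", FixedDescriptor::kStartOffset,
              FixedDescriptor::kEndOffset, FixedDescriptor::kSize);
  // A FlexibleBodyVisitor visits [8, SizeOf(...)) and returns that size.
  std::printf("flexible: [%d, %d)\n", FlexibleDescriptor::kStartOffset,
              FlexibleDescriptor::SizeOf(3));
  return 0;
}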
// Base class for visitors used for a linear new space iteration.
// IterateBody returns the size of the visited object.
// Certain types of objects (e.g. Code objects) are not handled
// by the dispatch table of this visitor because they cannot appear
// in new space.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of the Curiously Recurring Template Pattern
// (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
// We use CRTP to guarantee aggressive compile-time optimizations (i.e.
// inlining and specialization of StaticVisitor::VisitPointers methods).
template<typename StaticVisitor>
class StaticNewSpaceVisitor : public StaticVisitorBase {
public:
static void Initialize() {
table_.Register(kVisitShortcutCandidate,
&FixedBodyVisitor<StaticVisitor,
ConsString::BodyDescriptor,
int>::Visit);
table_.Register(kVisitConsString,
&FixedBodyVisitor<StaticVisitor,
ConsString::BodyDescriptor,
int>::Visit);
table_.Register(kVisitFixedArray,
&FlexibleBodyVisitor<StaticVisitor,
FixedArray::BodyDescriptor,
int>::Visit);
table_.Register(kVisitByteArray, &VisitByteArray);
table_.Register(kVisitSharedFunctionInfo,
&FixedBodyVisitor<StaticVisitor,
SharedFunctionInfo::BodyDescriptor,
int>::Visit);
table_.Register(kVisitSeqAsciiString, &VisitSeqAsciiString);
table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);
table_.RegisterSpecializations<DataObjectVisitor,
kVisitDataObject,
kVisitDataObjectGeneric>();
table_.RegisterSpecializations<JSObjectVisitor,
kVisitJSObject,
kVisitJSObjectGeneric>();
table_.RegisterSpecializations<StructVisitor,
kVisitStruct,
kVisitStructGeneric>();
}
static inline int IterateBody(Map* map, HeapObject* obj) {
return table_.GetVisitor(map)(map, obj);
}
static inline void VisitPointers(Object** start, Object** end) {
for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(p);
}
private:
static inline int VisitByteArray(Map* map, HeapObject* object) {
return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
}
static inline int VisitSeqAsciiString(Map* map, HeapObject* object) {
return SeqAsciiString::cast(object)->
SeqAsciiStringSize(map->instance_type());
}
static inline int VisitSeqTwoByteString(Map* map, HeapObject* object) {
return SeqTwoByteString::cast(object)->
SeqTwoByteStringSize(map->instance_type());
}
class DataObjectVisitor {
public:
template<int object_size>
static inline int VisitSpecialized(Map* map, HeapObject* object) {
return object_size;
}
static inline int Visit(Map* map, HeapObject* object) {
return map->instance_size();
}
};
typedef FlexibleBodyVisitor<StaticVisitor,
StructBodyDescriptor,
int> StructVisitor;
typedef FlexibleBodyVisitor<StaticVisitor,
JSObject::BodyDescriptor,
int> JSObjectVisitor;
typedef int (*Callback)(Map* map, HeapObject* object);
static VisitorDispatchTable<Callback> table_;
};
template<typename StaticVisitor>
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
StaticNewSpaceVisitor<StaticVisitor>::table_;
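The three lines above are easy to misread: they are the required out-of-line definition of the static data member table_, instantiated once per derived visitor, so each StaticNewSpaceVisitor subclass gets its own dispatch table. In miniature:

#include <cstdio>

template<typename Derived>
struct VisitorBase {
  static int table_;  // declaration only
};

template<typename Derived>
int VisitorBase<Derived>::table_ = 0;  // definition, one per instantiation

struct ScavengeVisitor : VisitorBase<ScavengeVisitor> {};
struct MarkVisitor : VisitorBase<MarkVisitor> {};

int main() {
  ScavengeVisitor::table_ = 1;  // independent storage per derived class
  std::printf("%d %d\n", ScavengeVisitor::table_, MarkVisitor::table_);
  return 0;  // prints "1 0"
}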
void Code::CodeIterateBody(ObjectVisitor* v) {
int mode_mask = RelocInfo::kCodeTargetMask |
RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
// Use the relocation info pointer before it is visited by
// the heap compaction in the next statement.
RelocIterator it(this, mode_mask);
IteratePointers(v,
kRelocationInfoOffset,
kRelocationInfoOffset + kPointerSize);
for (; !it.done(); it.next()) {
it.rinfo()->Visit(v);
}
}
template<typename StaticVisitor>
void Code::CodeIterateBody() {
int mode_mask = RelocInfo::kCodeTargetMask |
RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
// Use the relocation info pointer before it is visited by
// the heap compaction in the next statement.
RelocIterator it(this, mode_mask);
StaticVisitor::VisitPointer(
reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
for (; !it.done(); it.next()) {
it.rinfo()->Visit<StaticVisitor>();
}
}
} } // namespace v8::internal
#endif // V8_OBJECTS_ITERATION_H_
@@ -33,6 +33,7 @@
#include "debug.h"
#include "execution.h"
#include "objects-inl.h"
#include "objects-visiting.h"
#include "macro-assembler.h"
#include "scanner.h"
#include "scopeinfo.h"
@@ -1042,7 +1043,7 @@ int HeapObject::SlowSizeFromMap(Map* map) {
switch (instance_type) {
case FIXED_ARRAY_TYPE:
return reinterpret_cast<FixedArray*>(this)->FixedArraySize();
return FixedArray::BodyDescriptor::SizeOf(map, this);
case BYTE_ARRAY_TYPE:
return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
case CODE_TYPE:
@@ -1073,7 +1074,7 @@ void HeapObject::IterateBody(InstanceType type, int object_size,
case kSeqStringTag:
break;
case kConsStringTag:
reinterpret_cast<ConsString*>(this)->ConsStringIterateBody(v);
ConsString::BodyDescriptor::IterateBody(this, v);
break;
case kExternalStringTag:
if ((type & kStringEncodingMask) == kAsciiStringTag) {
......@@ -1090,7 +1091,7 @@ void HeapObject::IterateBody(InstanceType type, int object_size,
switch (type) {
case FIXED_ARRAY_TYPE:
reinterpret_cast<FixedArray*>(this)->FixedArrayIterateBody(v);
FixedArray::BodyDescriptor::IterateBody(this, object_size, v);
break;
case JS_OBJECT_TYPE:
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
......@@ -1101,23 +1102,22 @@ void HeapObject::IterateBody(InstanceType type, int object_size,
case JS_GLOBAL_PROXY_TYPE:
case JS_GLOBAL_OBJECT_TYPE:
case JS_BUILTINS_OBJECT_TYPE:
reinterpret_cast<JSObject*>(this)->JSObjectIterateBody(object_size, v);
JSObject::BodyDescriptor::IterateBody(this, object_size, v);
break;
case ODDBALL_TYPE:
reinterpret_cast<Oddball*>(this)->OddballIterateBody(v);
Oddball::BodyDescriptor::IterateBody(this, v);
break;
case PROXY_TYPE:
reinterpret_cast<Proxy*>(this)->ProxyIterateBody(v);
break;
case MAP_TYPE:
reinterpret_cast<Map*>(this)->MapIterateBody(v);
Map::BodyDescriptor::IterateBody(this, v);
break;
case CODE_TYPE:
reinterpret_cast<Code*>(this)->CodeIterateBody(v);
break;
case JS_GLOBAL_PROPERTY_CELL_TYPE:
reinterpret_cast<JSGlobalPropertyCell*>(this)
->JSGlobalPropertyCellIterateBody(v);
JSGlobalPropertyCell::BodyDescriptor::IterateBody(this, v);
break;
case HEAP_NUMBER_TYPE:
case FILLER_TYPE:
......@@ -1131,16 +1131,15 @@ void HeapObject::IterateBody(InstanceType type, int object_size,
case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
case EXTERNAL_FLOAT_ARRAY_TYPE:
break;
case SHARED_FUNCTION_INFO_TYPE: {
SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(this);
shared->SharedFunctionInfoIterateBody(v);
case SHARED_FUNCTION_INFO_TYPE:
SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
break;
}
#define MAKE_STRUCT_CASE(NAME, Name, name) \
case NAME##_TYPE:
STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
IterateStructBody(object_size, v);
StructBodyDescriptor::IterateBody(this, object_size, v);
break;
default:
PrintF("Unknown type: %d\n", type);
......@@ -1209,12 +1208,6 @@ String* JSObject::constructor_name() {
}
void JSObject::JSObjectIterateBody(int object_size, ObjectVisitor* v) {
// Iterate over all fields in the body. Assumes all are Object*.
IteratePointers(v, kPropertiesOffset, object_size);
}
Object* JSObject::AddFastPropertyUsingMap(Map* new_map,
String* name,
Object* value) {
......@@ -2190,8 +2183,7 @@ Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
int new_instance_size = map()->instance_size() - instance_size_delta;
new_map->set_inobject_properties(0);
new_map->set_instance_size(new_instance_size);
new_map->set_scavenger(Heap::GetScavenger(new_map->instance_type(),
new_map->instance_size()));
new_map->set_visitor_id(StaticVisitorBase::GetVisitorId(new_map));
Heap::CreateFillerObjectAt(this->address() + new_instance_size,
instance_size_delta);
}
......@@ -3407,11 +3399,6 @@ void CodeCacheHashTable::RemoveByIndex(int index) {
}
void FixedArray::FixedArrayIterateBody(ObjectVisitor* v) {
IteratePointers(v, kHeaderSize, kHeaderSize + length() * kPointerSize);
}
static bool HasKey(FixedArray* array, Object* key) {
int len0 = array->length();
for (int i = 0; i < len0; i++) {
......@@ -4501,16 +4488,6 @@ void ConsString::ConsStringReadBlockIntoBuffer(ReadBlockBuffer* rbb,
}
void ConsString::ConsStringIterateBody(ObjectVisitor* v) {
IteratePointers(v, kFirstOffset, kSecondOffset + kPointerSize);
}
void JSGlobalPropertyCell::JSGlobalPropertyCellIterateBody(ObjectVisitor* v) {
IteratePointers(v, kValueOffset, kValueOffset + kPointerSize);
}
uint16_t ConsString::ConsStringGet(int index) {
ASSERT(index >= 0 && index < this->length());
......@@ -4614,24 +4591,6 @@ void String::WriteToFlat(String* src,
}
#define FIELD_ADDR(p, offset) \
(reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
typedef v8::String::ExternalAsciiStringResource Resource;
v->VisitExternalAsciiString(
reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
typedef v8::String::ExternalStringResource Resource;
v->VisitExternalTwoByteString(
reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
#undef FIELD_ADDR
template <typename IteratorA, typename IteratorB>
static inline bool CompareStringContents(IteratorA* ia, IteratorB* ib) {
// General slow case check. We know that the ia and ib iterators
......@@ -5035,12 +4994,6 @@ void Map::ClearNonLiveTransitions(Object* real_prototype) {
}
void Map::MapIterateBody(ObjectVisitor* v) {
// Assumes all Object* members are contiguously allocated!
IteratePointers(v, kPointerFieldsBeginOffset, kPointerFieldsEndOffset);
}
Object* JSFunction::SetInstancePrototype(Object* value) {
ASSERT(value->IsJSObject());
......@@ -5104,12 +5057,6 @@ Context* JSFunction::GlobalContextFromLiterals(FixedArray* literals) {
}
void Oddball::OddballIterateBody(ObjectVisitor* v) {
// Assumes all Object* members are contiguously allocated!
IteratePointers(v, kToStringOffset, kToNumberOffset + kPointerSize);
}
Object* Oddball::Initialize(const char* to_string, Object* to_number) {
Object* symbol = Heap::LookupAsciiSymbol(to_string);
if (symbol->IsFailure()) return symbol;
......@@ -5282,13 +5229,6 @@ void SharedFunctionInfo::SourceCodePrint(StringStream* accumulator,
}
void SharedFunctionInfo::SharedFunctionInfoIterateBody(ObjectVisitor* v) {
IteratePointers(v,
kNameOffset,
kThisPropertyAssignmentsOffset + kPointerSize);
}
void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
......@@ -5310,28 +5250,6 @@ void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
}
void Code::CodeIterateBody(ObjectVisitor* v) {
int mode_mask = RelocInfo::kCodeTargetMask |
RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
// Use the relocation info pointer before it is visited by
// the heap compaction in the next statement.
RelocIterator it(this, mode_mask);
IteratePointers(v,
kRelocationInfoOffset,
kRelocationInfoOffset + kPointerSize);
for (; !it.done(); it.next()) {
it.rinfo()->Visit(v);
}
}
void Code::Relocate(intptr_t delta) {
for (RelocIterator it(this, RelocInfo::kApplyMask); !it.done(); it.next()) {
it.rinfo()->apply(delta);
......
......@@ -1106,6 +1106,51 @@ class HeapObject: public Object {
};
#define SLOT_ADDR(obj, offset) \
reinterpret_cast<Object**>((obj)->address() + offset)
// This class describes the body of a fixed-size object in which all
// pointer fields are located in the [start_offset, end_offset)
// interval.
template<int start_offset, int end_offset, int size>
class FixedBodyDescriptor {
public:
static const int kStartOffset = start_offset;
static const int kEndOffset = end_offset;
static const int kSize = size;
static inline void IterateBody(HeapObject* obj, ObjectVisitor* v);
template<typename StaticVisitor>
static inline void IterateBody(HeapObject* obj) {
StaticVisitor::VisitPointers(SLOT_ADDR(obj, start_offset),
SLOT_ADDR(obj, end_offset));
}
};
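As a hedged illustration (the Pair type below is hypothetical), a fixed-layout object exposes its pointer span through a typedef, exactly as Map, Oddball, ConsString and JSGlobalPropertyCell do later in this patch:

// Hypothetical two-pointer object.
class Pair : public HeapObject {
 public:
  static const int kFirstOffset = HeapObject::kHeaderSize;
  static const int kSecondOffset = kFirstOffset + kPointerSize;
  static const int kSize = kSecondOffset + kPointerSize;

  // Both dynamic (ObjectVisitor*) and static (template) iteration
  // come for free from the descriptor.
  typedef FixedBodyDescriptor<kFirstOffset, kSize, kSize> BodyDescriptor;
};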
// This class describes the body of a variable-size object in which all
// pointer fields are located in the [start_offset, object_size)
// interval.
template<int start_offset>
class FlexibleBodyDescriptor {
public:
static const int kStartOffset = start_offset;
static inline void IterateBody(HeapObject* obj,
int object_size,
ObjectVisitor* v);
template<typename StaticVisitor>
static inline void IterateBody(HeapObject* obj, int object_size) {
StaticVisitor::VisitPointers(SLOT_ADDR(obj, start_offset),
SLOT_ADDR(obj, object_size));
}
};
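Variable-size objects pair the descriptor with a SizeOf helper so the visitor table can compute object sizes; FixedArray::BodyDescriptor and StructBodyDescriptor below are the concrete cases in this patch, sketched here with a hypothetical Bag type:

// Hypothetical variable-size object: pointers from the header to the end.
class Bag : public HeapObject {
 public:
  class BodyDescriptor : public FlexibleBodyDescriptor<kHeaderSize> {
   public:
    static inline int SizeOf(Map* map, HeapObject* object) {
      return map->instance_size();  // size is recorded on the map
    }
  };
};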
#undef SLOT_ADDR
// The HeapNumber class describes heap allocated numbers that cannot be
// represented in a Smi (small integer)
class HeapNumber: public HeapObject {
......@@ -1522,7 +1567,6 @@ class JSObject: public HeapObject {
// Dispatched behavior.
void JSObjectIterateBody(int object_size, ObjectVisitor* v);
void JSObjectShortPrint(StringStream* accumulator);
#ifdef DEBUG
void JSObjectPrint();
......@@ -1578,6 +1622,11 @@ class JSObject: public HeapObject {
STATIC_CHECK(kHeaderSize == Internals::kJSObjectHeaderSize);
class BodyDescriptor : public FlexibleBodyDescriptor<kPropertiesOffset> {
public:
static inline int SizeOf(Map* map, HeapObject* object);
};
private:
Object* GetElementWithCallback(Object* receiver,
Object* structure,
......@@ -1692,8 +1741,6 @@ class FixedArray: public HeapObject {
static const int kMaxLength = (kMaxSize - kHeaderSize) / kPointerSize;
// Dispatched behavior.
int FixedArraySize() { return SizeFor(length()); }
void FixedArrayIterateBody(ObjectVisitor* v);
#ifdef DEBUG
void FixedArrayPrint();
void FixedArrayVerify();
......@@ -1711,6 +1758,13 @@ class FixedArray: public HeapObject {
// object, the prefix of this array is sorted.
void SortPairs(FixedArray* numbers, uint32_t len);
class BodyDescriptor : public FlexibleBodyDescriptor<kHeaderSize> {
public:
static inline int SizeOf(Map* map, HeapObject* object) {
return SizeFor(reinterpret_cast<FixedArray*>(object)->length());
}
};
protected:
// Set operation on FixedArray without using write barriers. Can
// only be used for storing old space objects or smis.
......@@ -2426,7 +2480,9 @@ class ByteArray: public HeapObject {
static inline ByteArray* cast(Object* obj);
// Dispatched behavior.
int ByteArraySize() { return SizeFor(length()); }
inline int ByteArraySize() {
return SizeFor(this->length());
}
#ifdef DEBUG
void ByteArrayPrint();
void ByteArrayVerify();
......@@ -2847,7 +2903,10 @@ class Code: public HeapObject {
// Dispatched behavior.
int CodeSize() { return SizeFor(body_size()); }
void CodeIterateBody(ObjectVisitor* v);
inline void CodeIterateBody(ObjectVisitor* v);
template<typename StaticVisitor>
inline void CodeIterateBody();
#ifdef DEBUG
void CodePrint();
void CodeVerify();
......@@ -2893,7 +2952,6 @@ class Code: public HeapObject {
DISALLOW_IMPLICIT_CONSTRUCTORS(Code);
};
typedef void (*Scavenger)(Map* map, HeapObject** slot, HeapObject* object);
// All heap objects have a Map that describes their structure.
// A Map contains information about:
......@@ -3089,18 +3147,13 @@ class Map: public HeapObject {
void ClearNonLiveTransitions(Object* real_prototype);
// Dispatched behavior.
void MapIterateBody(ObjectVisitor* v);
#ifdef DEBUG
void MapPrint();
void MapVerify();
#endif
inline Scavenger scavenger();
inline void set_scavenger(Scavenger callback);
inline void Scavenge(HeapObject** slot, HeapObject* obj) {
scavenger()(this, slot, obj);
}
inline int visitor_id();
inline void set_visitor_id(int visitor_id);
static const int kMaxPreAllocatedPropertyFields = 255;
......@@ -3160,6 +3213,10 @@ class Map: public HeapObject {
static const int kCodeCacheEntryNameOffset = 0;
static const int kCodeCacheEntryCodeOffset = 1;
typedef FixedBodyDescriptor<kPointerFieldsBeginOffset,
kPointerFieldsEndOffset,
kSize> BodyDescriptor;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(Map);
};
......@@ -3414,7 +3471,6 @@ class SharedFunctionInfo: public HeapObject {
int CalculateInObjectProperties();
// Dispatched behavior.
void SharedFunctionInfoIterateBody(ObjectVisitor* v);
// Set max_length to -1 for unlimited length.
void SourceCodePrint(StringStream* accumulator, int max_length);
#ifdef DEBUG
......@@ -3503,6 +3559,10 @@ class SharedFunctionInfo: public HeapObject {
#endif
static const int kAlignedSize = POINTER_SIZE_ALIGN(kSize);
typedef FixedBodyDescriptor<kNameOffset,
kThisPropertyAssignmentsOffset + kPointerSize,
kSize> BodyDescriptor;
private:
// Bit positions in start_position_and_type.
// The source code start position is in the 30 most significant bits of
......@@ -4553,11 +4613,6 @@ class ConsString: public String {
// Casting.
static inline ConsString* cast(Object* obj);
// Garbage collection support. This method is called during garbage
// collection to iterate through the heap pointers in the body of
// the ConsString.
void ConsStringIterateBody(ObjectVisitor* v);
// Layout description.
static const int kFirstOffset = POINTER_SIZE_ALIGN(String::kSize);
static const int kSecondOffset = kFirstOffset + kPointerSize;
......@@ -4574,6 +4629,9 @@ class ConsString: public String {
// Minimum length for a cons string.
static const int kMinLength = 13;
typedef FixedBodyDescriptor<kFirstOffset, kSecondOffset + kPointerSize, kSize>
BodyDescriptor;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ConsString);
};
......@@ -4623,7 +4681,10 @@ class ExternalAsciiString: public ExternalString {
static inline ExternalAsciiString* cast(Object* obj);
// Garbage collection support.
void ExternalAsciiStringIterateBody(ObjectVisitor* v);
inline void ExternalAsciiStringIterateBody(ObjectVisitor* v);
template<typename StaticVisitor>
inline void ExternalAsciiStringIterateBody();
// Support for StringInputBuffer.
const unibrow::byte* ExternalAsciiStringReadBlock(unsigned* remaining,
......@@ -4660,7 +4721,11 @@ class ExternalTwoByteString: public ExternalString {
static inline ExternalTwoByteString* cast(Object* obj);
// Garbage collection support.
void ExternalTwoByteStringIterateBody(ObjectVisitor* v);
inline void ExternalTwoByteStringIterateBody(ObjectVisitor* v);
template<typename StaticVisitor>
inline void ExternalTwoByteStringIterateBody();
// Support for StringInputBuffer.
void ExternalTwoByteStringReadBlockIntoBuffer(ReadBlockBuffer* buffer,
......@@ -4771,7 +4836,6 @@ class Oddball: public HeapObject {
static inline Oddball* cast(Object* obj);
// Dispatched behavior.
void OddballIterateBody(ObjectVisitor* v);
#ifdef DEBUG
void OddballVerify();
#endif
......@@ -4784,6 +4848,10 @@ class Oddball: public HeapObject {
static const int kToNumberOffset = kToStringOffset + kPointerSize;
static const int kSize = kToNumberOffset + kPointerSize;
typedef FixedBodyDescriptor<kToStringOffset,
kToNumberOffset + kPointerSize,
kSize> BodyDescriptor;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(Oddball);
};
......@@ -4797,8 +4865,6 @@ class JSGlobalPropertyCell: public HeapObject {
// Casting.
static inline JSGlobalPropertyCell* cast(Object* obj);
// Dispatched behavior.
void JSGlobalPropertyCellIterateBody(ObjectVisitor* v);
#ifdef DEBUG
void JSGlobalPropertyCellVerify();
void JSGlobalPropertyCellPrint();
......@@ -4808,6 +4874,10 @@ class JSGlobalPropertyCell: public HeapObject {
static const int kValueOffset = HeapObject::kHeaderSize;
static const int kSize = kValueOffset + kPointerSize;
typedef FixedBodyDescriptor<kValueOffset,
kValueOffset + kPointerSize,
kSize> BodyDescriptor;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(JSGlobalPropertyCell);
};
......@@ -4828,6 +4898,10 @@ class Proxy: public HeapObject {
// Dispatched behavior.
inline void ProxyIterateBody(ObjectVisitor* v);
template<typename StaticVisitor>
inline void ProxyIterateBody();
#ifdef DEBUG
void ProxyPrint();
void ProxyVerify();
......@@ -5345,6 +5419,15 @@ class ObjectVisitor BASE_EMBEDDED {
};
class StructBodyDescriptor : public
FlexibleBodyDescriptor<HeapObject::kHeaderSize> {
public:
static inline int SizeOf(Map* map, HeapObject* object) {
return map->instance_size();
}
};
// BooleanBit is a helper class for setting and getting a bit in an
// integer or Smi.
class BooleanBit : public AllStatic {
......
......@@ -680,14 +680,6 @@ void Deserializer::ReadObject(int space_number,
LOG(SnapshotPositionEvent(address, source_->position()));
}
ReadChunk(current, limit, space_number, address);
if (space == Heap::map_space()) {
ASSERT(size == Map::kSize);
HeapObject* obj = HeapObject::FromAddress(address);
Map* map = reinterpret_cast<Map*>(obj);
map->set_scavenger(Heap::GetScavenger(map->instance_type(),
map->instance_size()));
}
}
......
......@@ -350,6 +350,29 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
}
template<typename StaticVisitor>
void RelocInfo::Visit() {
RelocInfo::Mode mode = rmode();
if (mode == RelocInfo::EMBEDDED_OBJECT) {
StaticVisitor::VisitPointer(target_object_address());
} else if (RelocInfo::IsCodeTarget(mode)) {
StaticVisitor::VisitCodeTarget(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
StaticVisitor::VisitExternalReference(target_reference_address());
#ifdef ENABLE_DEBUGGER_SUPPORT
} else if (Debug::has_break_points() &&
((RelocInfo::IsJSReturn(mode) &&
IsPatchedReturnSequence()) ||
(RelocInfo::IsDebugBreakSlot(mode) &&
IsPatchedDebugBreakSlotSequence()))) {
StaticVisitor::VisitDebugTarget(this);
#endif
} else if (mode == RelocInfo::RUNTIME_ENTRY) {
StaticVisitor::VisitRuntimeEntry(this);
}
}
// -----------------------------------------------------------------------------
// Implementation of Operand
......
......@@ -38,7 +38,7 @@
from __future__ import with_statement
import sys, types, re, subprocess
import sys, types, re, subprocess, math
def flatten(l):
flat = []
......@@ -262,48 +262,57 @@ plots = [
],
]
def freduce(f, field, trace, init):
return reduce(lambda t,r: f(t, r[field]), trace, init)
def calc_total(trace, field):
return reduce(lambda t,r: t + r[field], trace, 0)
return freduce(lambda t,v: t + v, field, trace, 0)
def calc_max(trace, field):
return reduce(lambda t,r: max(t, r[field]), trace, 0)
return freduce(lambda t,r: max(t, r), field, trace, 0)
def process_trace(filename):
trace = parse_gc_trace(filename)
total_gc = calc_total(trace, 'pause')
max_gc = calc_max(trace, 'pause')
avg_gc = total_gc / len(trace)
def count_nonzero(trace, field):
return freduce(lambda t,r: t if r == 0 else t + 1, field, trace, 0)
total_sweep = calc_total(trace, 'sweep')
max_sweep = calc_max(trace, 'sweep')
total_mark = calc_total(trace, 'mark')
max_mark = calc_max(trace, 'mark')
def process_trace(filename):
trace = parse_gc_trace(filename)
marksweeps = filter(lambda r: r['gc'] == 'ms', trace)
markcompacts = filter(lambda r: r['gc'] == 'mc', trace)
scavenges = filter(lambda r: r['gc'] == 's', trace)
total_scavenge = calc_total(scavenges, 'pause')
max_scavenge = calc_max(scavenges, 'pause')
avg_scavenge = total_scavenge / len(scavenges)
charts = plot_all(plots, trace, filename)
def stats(out, prefix, trace, field):
n = len(trace)
total = calc_total(trace, field)
max = calc_max(trace, field)
avg = total / n
if n > 1:
dev = math.sqrt(freduce(lambda t,r: (r - avg) ** 2, field, trace, 0) /
(n - 1))
else:
dev = 0
out.write('<tr><td>%s</td><td>%d</td><td>%d</td>'
'<td>%d</td><td>%d [dev %f]</td></tr>' %
(prefix, n, total, max, avg, dev))
with open(filename + '.html', 'w') as out:
out.write('<html><body>')
out.write('<table><tr><td>')
out.write('Total in GC: <b>%d</b><br/>' % total_gc)
out.write('Max in GC: <b>%d</b><br/>' % max_gc)
out.write('Avg in GC: <b>%d</b><br/>' % avg_gc)
out.write('</td><td>')
out.write('Total in Scavenge: <b>%d</b><br/>' % total_scavenge)
out.write('Max in Scavenge: <b>%d</b><br/>' % max_scavenge)
out.write('Avg in Scavenge: <b>%d</b><br/>' % avg_scavenge)
out.write('</td><td>')
out.write('Total in Sweep: <b>%d</b><br/>' % total_sweep)
out.write('Max in Sweep: <b>%d</b><br/>' % max_sweep)
out.write('</td><td>')
out.write('Total in Mark: <b>%d</b><br/>' % total_mark)
out.write('Max in Mark: <b>%d</b><br/>' % max_mark)
out.write('</td></tr></table>')
out.write('<table>')
out.write('<tr><td>Phase</td><td>Count</td><td>Time (ms)</td><td>Max</td><td>Avg</td></tr>')
stats(out, 'Total in GC', trace, 'pause')
stats(out, 'Scavenge', scavenges, 'pause')
stats(out, 'MarkSweep', marksweeps, 'pause')
stats(out, 'MarkCompact', markcompacts, 'pause')
stats(out, 'Mark', filter(lambda r: r['mark'] != 0, trace), 'mark')
stats(out, 'Sweep', filter(lambda r: r['sweep'] != 0, trace), 'sweep')
stats(out, 'Flush Code', filter(lambda r: r['flushcode'] != 0, trace), 'flushcode')
stats(out, 'Compact', filter(lambda r: r['compact'] != 0, trace), 'compact')
out.write('</table>')
for chart in charts:
out.write('<img src="%s">' % chart)
out.write('</body></html>')
......