Implement explicit back pointers in transition tree.

This makes back pointers in the map transition tree explicit by
maintaining accurate back pointers throughout the lifetime of maps,
instead of establishing them before and tearing them down after each
marking phase. This is a prerequisite for being able to clear map
transitions during incremental marking.

R=vegorov@chromium.org
BUG=v8:1465

Review URL: https://chromiumcodereview.appspot.com/10381053

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@11528 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 13e4b7d8
@@ -2020,7 +2020,7 @@ MaybeObject* Heap::AllocateMap(InstanceType instance_type,
   map->set_pre_allocated_property_fields(0);
   map->init_instance_descriptors();
   map->set_code_cache(empty_fixed_array(), SKIP_WRITE_BARRIER);
-  map->set_prototype_transitions(empty_fixed_array(), SKIP_WRITE_BARRIER);
+  map->init_prototype_transitions(undefined_value());
   map->set_unused_property_fields(0);
   map->set_bit_field(0);
   map->set_bit_field2(1 << Map::kIsExtensible);
@@ -2159,15 +2159,15 @@ bool Heap::CreateInitialMaps() {
   // Fix the instance_descriptors for the existing maps.
   meta_map()->init_instance_descriptors();
   meta_map()->set_code_cache(empty_fixed_array());
-  meta_map()->set_prototype_transitions(empty_fixed_array());
+  meta_map()->init_prototype_transitions(undefined_value());

   fixed_array_map()->init_instance_descriptors();
   fixed_array_map()->set_code_cache(empty_fixed_array());
-  fixed_array_map()->set_prototype_transitions(empty_fixed_array());
+  fixed_array_map()->init_prototype_transitions(undefined_value());

   oddball_map()->init_instance_descriptors();
   oddball_map()->set_code_cache(empty_fixed_array());
-  oddball_map()->set_prototype_transitions(empty_fixed_array());
+  oddball_map()->init_prototype_transitions(undefined_value());

   // Fix prototype object for existing maps.
   meta_map()->set_prototype(null_value());
......
@@ -680,7 +680,6 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
   ASSERT(!FLAG_never_compact || !FLAG_always_compact);

-  if (collect_maps_) CreateBackPointers();
 #ifdef ENABLE_GDB_JIT_INTERFACE
   if (FLAG_gdbjit) {
     // If GDBJIT interface is active disable compaction.
@@ -1816,13 +1815,19 @@ void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
 void MarkCompactCollector::MarkMapContents(Map* map) {
   // Mark prototype transitions array but don't push it into marking stack.
   // This will make references from it weak. We will clean dead prototype
-  // transitions in ClearNonLiveTransitions.
-  FixedArray* prototype_transitions = map->prototype_transitions();
-  MarkBit mark = Marking::MarkBitFrom(prototype_transitions);
-  if (!mark.Get()) {
-    mark.Set();
-    MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(),
-                                          prototype_transitions->Size());
+  // transitions in ClearNonLiveTransitions. But make sure that back pointers
+  // stored inside prototype transitions arrays are marked.
+  Object* raw_proto_transitions = map->unchecked_prototype_transitions();
+  if (raw_proto_transitions->IsFixedArray()) {
+    FixedArray* prototype_transitions = FixedArray::cast(raw_proto_transitions);
+    MarkBit mark = Marking::MarkBitFrom(prototype_transitions);
+    if (!mark.Get()) {
+      mark.Set();
+      MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(),
                                            prototype_transitions->Size());
+      MarkObjectAndPush(HeapObject::cast(
+          prototype_transitions->get(Map::kProtoTransitionBackPointerOffset)));
+    }
   }

   Object** raw_descriptor_array_slot =
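The marking change above keeps the prototype transitions array weak while treating its back-pointer slot as strong. A minimal standalone sketch of that rule, using toy types rather than V8's Object/MarkBit machinery:

#include <cassert>
#include <vector>

struct Obj { bool marked = false; };

static std::vector<Obj*> marking_stack;

static void MarkObjectAndPush(Obj* o) {
  if (!o->marked) {
    o->marked = true;
    marking_stack.push_back(o);  // its contents get scanned later
  }
}

int main() {
  Obj array;  // stands in for the prototype transitions FixedArray
  Obj header, back_pointer, cached_target;
  std::vector<Obj*> slots = {&header, &back_pointer, &cached_target};

  // As in MarkMapContents: mark the array itself without pushing it,
  // so references from it stay weak...
  array.marked = true;
  // ...but push the back-pointer slot (kProtoTransitionBackPointerOffset
  // == 1) as a strong reference.
  MarkObjectAndPush(slots[1]);

  assert(array.marked);           // the array itself survives the GC
  assert(back_pointer.marked);    // the back pointer is kept alive
  assert(!cached_target.marked);  // cached transitions remain weak
  return 0;
}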
@@ -1921,23 +1926,6 @@ void MarkCompactCollector::MarkDescriptorArray(
 }


-void MarkCompactCollector::CreateBackPointers() {
-  HeapObjectIterator iterator(heap()->map_space());
-  for (HeapObject* next_object = iterator.Next();
-       next_object != NULL; next_object = iterator.Next()) {
-    if (next_object->IsMap()) {  // Could also be FreeSpace object on free list.
-      Map* map = Map::cast(next_object);
-      STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
-      if (map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
-        map->CreateBackPointers();
-      } else {
-        ASSERT(map->instance_descriptors() == heap()->empty_descriptor_array());
-      }
-    }
-  }
-}
-
-
 // Fill the marking stack with overflowed objects returned by the given
 // iterator.  Stop when the marking stack is filled or the end of the space
 // is reached, whichever comes first.
@@ -2461,15 +2449,8 @@ void MarkCompactCollector::ReattachInitialMaps() {
 void MarkCompactCollector::ClearNonLiveTransitions() {
   HeapObjectIterator map_iterator(heap()->map_space());
   // Iterate over the map space, setting map transitions that go from
-  // a marked map to an unmarked map to null transitions.  At the same time,
-  // set all the prototype fields of maps back to their original value,
-  // dropping the back pointers temporarily stored in the prototype field.
-  // Setting the prototype field requires following the linked list of
-  // back pointers, reversing them all at once.  This allows us to find
-  // those maps with map transitions that need to be nulled, and only
-  // scan the descriptor arrays of those maps, not all maps.
-  // All of these actions are carried out only on maps of JSObjects
-  // and related subtypes.
+  // a marked map to an unmarked map to null transitions.  This action
+  // is carried out only on maps of JSObjects and related subtypes.
   for (HeapObject* obj = map_iterator.Next();
        obj != NULL; obj = map_iterator.Next()) {
     Map* map = reinterpret_cast<Map*>(obj);
@@ -2545,36 +2526,16 @@ void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
 void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map,
                                                       MarkBit map_mark) {
-  // Follow the chain of back pointers to find the prototype.
-  Object* real_prototype = map;
-  while (real_prototype->IsMap()) {
-    real_prototype = Map::cast(real_prototype)->prototype();
-    ASSERT(real_prototype->IsHeapObject());
-  }
+  Object* potential_parent = map->GetBackPointer();
+  if (!potential_parent->IsMap()) return;
+  Map* parent = Map::cast(potential_parent);

-  // Follow back pointers, setting them to prototype, clearing map transitions
-  // when necessary.
-  Map* current = map;
+  // Follow back pointer, check whether we are dealing with a map transition
+  // from a live map to a dead path and in case clear transitions of parent.
   bool current_is_alive = map_mark.Get();
-  bool on_dead_path = !current_is_alive;
-  while (current->IsMap()) {
-    Object* next = current->prototype();
-    // There should never be a dead map above a live map.
-    ASSERT(on_dead_path || current_is_alive);
-
-    // A live map above a dead map indicates a dead transition. This test will
-    // always be false on the first iteration.
-    if (on_dead_path && current_is_alive) {
-      on_dead_path = false;
-      current->ClearNonLiveTransitions(heap(), real_prototype);
-    }
-
-    Object** slot = HeapObject::RawField(current, Map::kPrototypeOffset);
-    *slot = real_prototype;
-    if (current_is_alive) RecordSlot(slot, slot, real_prototype);
-
-    current = reinterpret_cast<Map*>(next);
-    current_is_alive = Marking::MarkBitFrom(current).Get();
+  bool parent_is_alive = Marking::MarkBitFrom(parent).Get();
+  if (!current_is_alive && parent_is_alive) {
+    parent->ClearNonLiveTransitions(heap());
   }
 }
......
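With explicit back pointers, ClearNonLiveMapTransitions no longer has to walk a reversed prototype chain; it only compares the liveness of a map and its parent. A toy rendition of the new rule (assumed simplification: nullptr models an undefined back pointer):

#include <cstdio>

struct ToyMap {
  bool marked;
  ToyMap* parent;  // explicit back pointer; nullptr models undefined
};

// Same shape as the new ClearNonLiveMapTransitions: a dead map under a
// live parent marks a dead transition that must be cleared in the parent.
static void ClearNonLiveMapTransitions(ToyMap* map) {
  ToyMap* parent = map->parent;
  if (parent == nullptr) return;  // no back pointer, nothing to do
  bool current_is_alive = map->marked;
  bool parent_is_alive = parent->marked;
  if (!current_is_alive && parent_is_alive) {
    // V8 calls parent->ClearNonLiveTransitions(heap()) here, nulling the
    // parent's descriptor entries that lead to dead maps.
    std::printf("clearing transitions from live parent to dead child\n");
  }
}

int main() {
  ToyMap parent = {true, nullptr};
  ToyMap dead_child = {false, &parent};
  ClearNonLiveMapTransitions(&dead_child);  // takes the clearing path
  return 0;
}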
@@ -642,13 +642,6 @@ class MarkCompactCollector {
   void ProcessNewlyMarkedObject(HeapObject* obj);

-  // Creates back pointers for all map transitions, stores them in
-  // the prototype field.  The original prototype pointers are restored
-  // in ClearNonLiveTransitions().  All JSObject maps
-  // connected by map transitions have the same prototype object, which
-  // is why we can use this field temporarily for back pointers.
-  void CreateBackPointers();
-
   // Mark a Map and its DescriptorArray together, skipping transitions.
   void MarkMapContents(Map* map);
   void MarkAccessorPairSlot(HeapObject* accessors, int offset);
......
@@ -303,6 +303,8 @@ void Map::MapVerify() {
           instance_size() < HEAP->Capacity()));
   VerifyHeapPointer(prototype());
   VerifyHeapPointer(instance_descriptors());
+  SLOW_ASSERT(instance_descriptors()->IsSortedNoDuplicates());
+  SLOW_ASSERT(instance_descriptors()->IsConsistentWithBackPointers(this));
 }
@@ -894,6 +896,61 @@ bool DescriptorArray::IsSortedNoDuplicates() {
 }


+static bool CheckOneBackPointer(Map* current_map, Object* target) {
+  return !target->IsMap() || Map::cast(target)->GetBackPointer() == current_map;
+}
+
+
+bool DescriptorArray::IsConsistentWithBackPointers(Map* current_map) {
+  for (int i = 0; i < number_of_descriptors(); ++i) {
+    switch (GetType(i)) {
+      case MAP_TRANSITION:
+      case CONSTANT_TRANSITION:
+        if (!CheckOneBackPointer(current_map, GetValue(i))) {
+          return false;
+        }
+        break;
+      case ELEMENTS_TRANSITION: {
+        Object* object = GetValue(i);
+        if (!CheckOneBackPointer(current_map, object)) {
+          return false;
+        }
+        if (object->IsFixedArray()) {
+          FixedArray* array = FixedArray::cast(object);
+          for (int i = 0; i < array->length(); ++i) {
+            if (!CheckOneBackPointer(current_map, array->get(i))) {
+              return false;
+            }
+          }
+        }
+        break;
+      }
+      case CALLBACKS: {
+        Object* object = GetValue(i);
+        if (object->IsAccessorPair()) {
+          AccessorPair* accessors = AccessorPair::cast(object);
+          if (!CheckOneBackPointer(current_map, accessors->getter())) {
+            return false;
+          }
+          if (!CheckOneBackPointer(current_map, accessors->setter())) {
+            return false;
+          }
+        }
+        break;
+      }
+      case NORMAL:
+      case FIELD:
+      case CONSTANT_FUNCTION:
+      case HANDLER:
+      case INTERCEPTOR:
+      case NULL_DESCRIPTOR:
+        break;
+    }
+  }
+  return true;
+}
+
+
 void JSFunctionResultCache::JSFunctionResultCacheVerify() {
   JSFunction::cast(get(kFactoryIndex))->Verify();
......
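The new debug check enforces one invariant per descriptor entry: every transition target recorded in a map's descriptor array must name that map as its back pointer. In toy form (simplified types, no descriptor kinds):

#include <cassert>
#include <vector>

struct ToyMap {
  ToyMap* back_pointer = nullptr;
  std::vector<ToyMap*> transition_targets;  // stands in for transition
};                                          // entries in the descriptor array

// Mirrors CheckOneBackPointer: non-map targets pass trivially.
static bool CheckOneBackPointer(ToyMap* current_map, ToyMap* target) {
  return target == nullptr || target->back_pointer == current_map;
}

static bool IsConsistentWithBackPointers(ToyMap* map) {
  for (ToyMap* target : map->transition_targets) {
    if (!CheckOneBackPointer(map, target)) return false;
  }
  return true;
}

int main() {
  ToyMap parent, child;
  parent.transition_targets.push_back(&child);
  child.back_pointer = &parent;
  assert(IsConsistentWithBackPointers(&parent));

  child.back_pointer = nullptr;  // sever the link: the verifier now fails
  assert(!IsConsistentWithBackPointers(&parent));
  return 0;
}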
@@ -3407,14 +3407,66 @@ void Map::set_bit_field3(int value) {
 }


-FixedArray* Map::unchecked_prototype_transitions() {
-  return reinterpret_cast<FixedArray*>(
-      READ_FIELD(this, kPrototypeTransitionsOffset));
+Object* Map::GetBackPointer() {
+  Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
+  if (object->IsFixedArray()) {
+    return FixedArray::cast(object)->get(kProtoTransitionBackPointerOffset);
+  } else {
+    return object;
+  }
+}
+
+
+void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
+  Heap* heap = GetHeap();
+  ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
+  ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
+         (value->IsMap() && GetBackPointer()->IsUndefined()));
+  Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
+  if (object->IsFixedArray()) {
+    FixedArray::cast(object)->set(
+        kProtoTransitionBackPointerOffset, value, mode);
+  } else {
+    WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
+    CONDITIONAL_WRITE_BARRIER(
+        heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
+  }
+}
+
+
+FixedArray* Map::prototype_transitions() {
+  Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
+  if (object->IsFixedArray()) {
+    return FixedArray::cast(object);
+  } else {
+    return GetHeap()->empty_fixed_array();
+  }
+}
+
+
+void Map::set_prototype_transitions(FixedArray* value, WriteBarrierMode mode) {
+  Heap* heap = GetHeap();
+  ASSERT(value != heap->empty_fixed_array());
+  value->set(kProtoTransitionBackPointerOffset, GetBackPointer());
+  WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
+  CONDITIONAL_WRITE_BARRIER(
+      heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
+}
+
+
+void Map::init_prototype_transitions(Object* undefined) {
+  ASSERT(undefined->IsUndefined());
+  WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, undefined);
+}
+
+
+HeapObject* Map::unchecked_prototype_transitions() {
+  Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
+  return reinterpret_cast<HeapObject*>(object);
 }


 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
-ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
 ACCESSORS(Map, constructor, Object, kConstructorOffset)

 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
......
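These accessors implement the field overlap at the heart of the change: a single word holds undefined, the back pointer, or the prototype transitions array, and the back pointer is relocated into slot kProtoTransitionBackPointerOffset once an array exists. A self-contained model of the three states (toy types; nullptr stands in for undefined):

#include <cassert>
#include <vector>

struct ToyMap;

// The overlapped slot: empty (undefined), a direct back pointer, or a
// transitions array whose element 1 holds the back pointer.
struct Slot {
  enum Kind { kUndefined, kBackPointer, kArray };
  Kind kind = kUndefined;
  ToyMap* back = nullptr;
  std::vector<ToyMap*>* array = nullptr;
};

struct ToyMap {
  Slot slot;

  ToyMap* GetBackPointer() const {
    if (slot.kind == Slot::kArray) return (*slot.array)[1];
    return slot.kind == Slot::kBackPointer ? slot.back : nullptr;
  }

  void SetBackPointer(ToyMap* value) {
    // Mirrors the ASSERT above: only undefined -> map or map -> undefined.
    assert((value == nullptr) != (GetBackPointer() == nullptr));
    if (slot.kind == Slot::kArray) {
      (*slot.array)[1] = value;  // back pointer lives inside the array
    } else {
      slot.kind = value ? Slot::kBackPointer : Slot::kUndefined;
      slot.back = value;
    }
  }

  // Like set_prototype_transitions: preserve the back pointer in slot 1.
  void SetPrototypeTransitions(std::vector<ToyMap*>* transitions) {
    (*transitions)[1] = GetBackPointer();
    slot.kind = Slot::kArray;
    slot.array = transitions;
  }
};

int main() {
  ToyMap parent, child;
  child.SetBackPointer(&parent);              // undefined -> map
  assert(child.GetBackPointer() == &parent);

  std::vector<ToyMap*> transitions(2, nullptr);
  child.SetPrototypeTransitions(&transitions);
  assert(child.GetBackPointer() == &parent);  // reads look through the array
  return 0;
}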
@@ -1604,6 +1604,7 @@ MaybeObject* JSObject::AddFastProperty(String* name,
   // We have now allocated all the necessary objects.
   // All the changes can be applied at once, so they are atomic.
   map()->set_instance_descriptors(old_descriptors);
+  new_map->SetBackPointer(map());
   new_map->set_instance_descriptors(DescriptorArray::cast(new_descriptors));
   set_map(new_map);
   return FastPropertyAtPut(index, value);
@@ -1664,6 +1665,7 @@ MaybeObject* JSObject::AddConstantFunctionProperty(
     }
   }
   old_map->set_instance_descriptors(DescriptorArray::cast(new_descriptors));
+  Map::cast(new_map)->SetBackPointer(old_map);
   return function;
 }
@@ -1824,6 +1826,7 @@ MaybeObject* JSObject::ConvertDescriptorToFieldAndMapTransition(
     }
   }
   old_map->set_instance_descriptors(DescriptorArray::cast(new_descriptors));
+  map()->SetBackPointer(old_map);
   return result;
 }
@@ -2408,6 +2411,7 @@ MaybeObject* Map::AddElementsTransition(ElementsKind elements_kind,
     return maybe_new_descriptors;
   }
   set_instance_descriptors(DescriptorArray::cast(new_descriptors));
+  transitioned_map->SetBackPointer(this);
   return this;
 }
@@ -4653,6 +4657,7 @@ static MaybeObject* CreateFreshAccessor(JSObject* obj,
   // step 6: everything went well so far, so we make our changes visible
   obj->set_map(map2);
   map1->set_instance_descriptors(descriptors1);
+  map2->SetBackPointer(map1);
   return obj;
 }
@@ -4705,6 +4710,7 @@ static MaybeObject* NewCallbackTransition(JSObject* obj,
   // step 4: everything went well so far, so we make our changes visible
   obj->set_map(map3);
   accessors2->set(component, map3);
+  map3->SetBackPointer(map2);
   return obj;
 }
@@ -5137,7 +5143,7 @@ class IntrusiveMapTransitionIterator {
 // underlying array while it is running.
 class IntrusivePrototypeTransitionIterator {
  public:
-  explicit IntrusivePrototypeTransitionIterator(FixedArray* proto_trans)
+  explicit IntrusivePrototypeTransitionIterator(HeapObject* proto_trans)
       : proto_trans_(proto_trans) { }

   void Start() {
@@ -5162,7 +5168,7 @@ class IntrusivePrototypeTransitionIterator {
  private:
   bool HasTransitions() {
-    return proto_trans_->length() >= Map::kProtoTransitionHeaderSize;
+    return proto_trans_->map()->IsSmi() || proto_trans_->IsFixedArray();
   }

   Object** Header() {
@@ -5170,12 +5176,16 @@ class IntrusivePrototypeTransitionIterator {
   }

   int NumberOfTransitions() {
-    Object* num = proto_trans_->get(Map::kProtoTransitionNumberOfEntriesOffset);
+    ASSERT(HasTransitions());
+    FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
+    Object* num = proto_trans->get(Map::kProtoTransitionNumberOfEntriesOffset);
     return Smi::cast(num)->value();
   }

   Map* GetTransition(int transitionNumber) {
-    return Map::cast(proto_trans_->get(IndexFor(transitionNumber)));
+    ASSERT(HasTransitions());
+    FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
+    return Map::cast(proto_trans->get(IndexFor(transitionNumber)));
   }

   int IndexFor(int transitionNumber) {
@@ -5184,7 +5194,7 @@ class IntrusivePrototypeTransitionIterator {
         transitionNumber * Map::kProtoTransitionElementsPerEntry;
   }

-  FixedArray* proto_trans_;
+  HeapObject* proto_trans_;
 };
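The iterator now receives a HeapObject that may be undefined, a FixedArray, or an array whose map word has been temporarily replaced by a Smi cursor while intrusive iteration is in progress; HasTransitions above accepts the latter two. A toy model of the tagging trick it relies on (assumed 1-bit tagging; V8's actual scheme differs in detail by platform):

#include <cassert>
#include <cstdint>

// Toy tagged word: low bit 0 means Smi, low bit 1 means heap pointer.
using Word = intptr_t;
static bool IsSmi(Word w) { return (w & 1) == 0; }
static Word SmiFromInt(int i) { return static_cast<Word>(i) << 1; }
static int SmiToInt(Word w) { return static_cast<int>(w >> 1); }

int main() {
  Word map_word = 0x1001;    // pretend pointer to the FixedArray map
  assert(!IsSmi(map_word));  // not iterating: array looks like an array

  map_word = SmiFromInt(3);  // Start(): cursor is stored in the map word
  assert(IsSmi(map_word));   // in progress: map()->IsSmi() is true
  assert(SmiToInt(map_word) == 3);
  return 0;
}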
@@ -7346,85 +7356,23 @@ void String::PrintOn(FILE* file) {
 }


-void Map::CreateOneBackPointer(Object* transition_target) {
-  if (!transition_target->IsMap()) return;
-  Map* target = Map::cast(transition_target);
-#ifdef DEBUG
-  // Verify target.
-  Object* source_prototype = prototype();
-  Object* target_prototype = target->prototype();
-  ASSERT(source_prototype->IsJSReceiver() ||
-         source_prototype->IsMap() ||
-         source_prototype->IsNull());
-  ASSERT(target_prototype->IsJSReceiver() ||
-         target_prototype->IsNull());
-  ASSERT(source_prototype->IsMap() ||
-         source_prototype == target_prototype);
-#endif
-  // Point target back to source.  set_prototype() will not let us set
-  // the prototype to a map, as we do here.
-  *RawField(target, kPrototypeOffset) = this;
-}
-
-
-void Map::CreateBackPointers() {
-  DescriptorArray* descriptors = instance_descriptors();
-  for (int i = 0; i < descriptors->number_of_descriptors(); i++) {
-    switch (descriptors->GetType(i)) {
-      case MAP_TRANSITION:
-      case CONSTANT_TRANSITION:
-        CreateOneBackPointer(descriptors->GetValue(i));
-        break;
-      case ELEMENTS_TRANSITION: {
-        Object* object = descriptors->GetValue(i);
-        if (object->IsMap()) {
-          CreateOneBackPointer(object);
-        } else {
-          FixedArray* array = FixedArray::cast(object);
-          for (int i = 0; i < array->length(); ++i) {
-            CreateOneBackPointer(array->get(i));
-          }
-        }
-        break;
-      }
-      case CALLBACKS: {
-        Object* object = descriptors->GetValue(i);
-        if (object->IsAccessorPair()) {
-          AccessorPair* accessors = AccessorPair::cast(object);
-          CreateOneBackPointer(accessors->getter());
-          CreateOneBackPointer(accessors->setter());
-        }
-        break;
-      }
-      case NORMAL:
-      case FIELD:
-      case CONSTANT_FUNCTION:
-      case HANDLER:
-      case INTERCEPTOR:
-      case NULL_DESCRIPTOR:
-        break;
-    }
-  }
-}
-
-
-bool Map::RestoreOneBackPointer(Object* object,
-                                Object* real_prototype,
-                                bool* keep_entry) {
-  if (!object->IsMap()) return false;
-  Map* map = Map::cast(object);
+// Clear a possible back pointer in case the transition leads to a dead map.
+// Return true in case a back pointer has been cleared and false otherwise.
+// Set *keep_entry to true when a live map transition has been found.
+static bool ClearBackPointer(Heap* heap, Object* target, bool* keep_entry) {
+  if (!target->IsMap()) return false;
+  Map* map = Map::cast(target);
   if (Marking::MarkBitFrom(map).Get()) {
     *keep_entry = true;
+    return false;
+  } else {
+    map->SetBackPointer(heap->undefined_value(), SKIP_WRITE_BARRIER);
+    return true;
   }
-  ASSERT(map->prototype() == this || map->prototype() == real_prototype);
-  // Getter prototype() is read-only, set_prototype() has side effects.
-  *RawField(map, Map::kPrototypeOffset) = real_prototype;
-  return true;
 }


-void Map::ClearNonLiveTransitions(Heap* heap, Object* real_prototype) {
+void Map::ClearNonLiveTransitions(Heap* heap) {
   DescriptorArray* d = DescriptorArray::cast(
       *RawField(this, Map::kInstanceDescriptorsOrBitField3Offset));
   if (d->IsEmpty()) return;
@@ -7437,24 +7385,22 @@ void Map::ClearNonLiveTransitions(Heap* heap, Object* real_prototype) {
     // If the pair (value, details) is a map transition, check if the target is
     // live. If not, null the descriptor. Also drop the back pointer for that
     // map transition, so that this map is not reached again by following a back
-    // pointer from a non-live object.
+    // pointer from that non-live map.
     bool keep_entry = false;
     PropertyDetails details(Smi::cast(contents->get(i + 1)));
     switch (details.type()) {
       case MAP_TRANSITION:
       case CONSTANT_TRANSITION:
-        RestoreOneBackPointer(contents->get(i), real_prototype, &keep_entry);
+        ClearBackPointer(heap, contents->get(i), &keep_entry);
         break;
       case ELEMENTS_TRANSITION: {
         Object* object = contents->get(i);
         if (object->IsMap()) {
-          RestoreOneBackPointer(object, real_prototype, &keep_entry);
+          ClearBackPointer(heap, object, &keep_entry);
         } else {
           FixedArray* array = FixedArray::cast(object);
           for (int j = 0; j < array->length(); ++j) {
-            if (RestoreOneBackPointer(array->get(j),
-                                      real_prototype,
-                                      &keep_entry)) {
+            if (ClearBackPointer(heap, array->get(j), &keep_entry)) {
               array->set_undefined(j);
             }
           }
@@ -7465,14 +7411,10 @@ void Map::ClearNonLiveTransitions(Heap* heap, Object* real_prototype) {
         Object* object = contents->get(i);
         if (object->IsAccessorPair()) {
           AccessorPair* accessors = AccessorPair::cast(object);
-          if (RestoreOneBackPointer(accessors->getter(),
-                                    real_prototype,
-                                    &keep_entry)) {
+          if (ClearBackPointer(heap, accessors->getter(), &keep_entry)) {
             accessors->set_getter(heap->the_hole_value());
           }
-          if (RestoreOneBackPointer(accessors->setter(),
-                                    real_prototype,
-                                    &keep_entry)) {
+          if (ClearBackPointer(heap, accessors->setter(), &keep_entry)) {
             accessors->set_setter(heap->the_hole_value());
           }
         } else {
......
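ClearBackPointer's contract, used throughout the switch above: the return value tells the caller whether to null the slot it passed in, while *keep_entry accumulates whether any live target justifies keeping the descriptor entry. In toy form:

#include <cassert>

struct ToyMap {
  bool marked;
  bool has_back_pointer;
};

// Same contract as ClearBackPointer: returns true iff the target was a
// dead map whose back pointer was just cleared (caller nulls the slot);
// sets *keep_entry when the target is a live map.
static bool ClearBackPointer(ToyMap* target, bool* keep_entry) {
  if (target == nullptr) return false;  // not a map: leave the slot alone
  if (target->marked) {
    *keep_entry = true;
    return false;
  }
  target->has_back_pointer = false;     // sever the dead map's back pointer
  return true;
}

int main() {
  bool keep_entry = false;
  ToyMap live = {true, true};
  ToyMap dead = {false, true};

  assert(!ClearBackPointer(&live, &keep_entry));
  assert(keep_entry);                   // live transition keeps the entry

  assert(ClearBackPointer(&dead, &keep_entry));
  assert(!dead.has_back_pointer);       // dead target was unlinked
  return 0;
}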
@@ -2609,6 +2609,9 @@ class DescriptorArray: public FixedArray {
   // Is the descriptor array sorted and without duplicates?
   bool IsSortedNoDuplicates();

+  // Is the descriptor array consistent with the back pointers in targets?
+  bool IsConsistentWithBackPointers(Map* current_map);
+
   // Are two DescriptorArrays equal?
   bool IsEqualTo(DescriptorArray* other);
 #endif
@@ -4719,19 +4722,30 @@ class Map: public HeapObject {
   // [stub cache]: contains stubs compiled for this map.
   DECL_ACCESSORS(code_cache, Object)

+  // [back pointer]: points back to the parent map from which a transition
+  // leads to this map. The field overlaps with prototype transitions and the
+  // back pointer will be moved into the prototype transitions array if
+  // required.
+  inline Object* GetBackPointer();
+  inline void SetBackPointer(Object* value,
+                             WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
+
   // [prototype transitions]: cache of prototype transitions.
   // Prototype transition is a transition that happens
   // when we change object's prototype to a new one.
   // Cache format:
   //    0: finger - index of the first free cell in the cache
-  //    1 + 2 * i: prototype
-  //    2 + 2 * i: target map
+  //    1: back pointer that overlaps with prototype transitions field.
+  //    2 + 2 * i: prototype
+  //    3 + 2 * i: target map
   DECL_ACCESSORS(prototype_transitions, FixedArray)
-  inline FixedArray* unchecked_prototype_transitions();

-  static const int kProtoTransitionHeaderSize = 1;
+  inline void init_prototype_transitions(Object* undefined);
+  inline HeapObject* unchecked_prototype_transitions();
+
+  static const int kProtoTransitionHeaderSize = 2;
   static const int kProtoTransitionNumberOfEntriesOffset = 0;
+  static const int kProtoTransitionBackPointerOffset = 1;
   static const int kProtoTransitionElementsPerEntry = 2;
   static const int kProtoTransitionPrototypeOffset = 0;
   static const int kProtoTransitionMapOffset = 1;
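With the header grown from one slot to two, entry i of the cache sits behind the finger and the back pointer. A worked check of the index arithmetic implied by the constants above:

#include <cassert>

// Derived from the constants above: a header of 2 (finger, back pointer),
// then 2 slots per entry (prototype, target map).
static int ProtoTransitionIndex(int entry, int offset_within_entry) {
  const int kHeaderSize = 2;        // kProtoTransitionHeaderSize (was 1)
  const int kElementsPerEntry = 2;  // kProtoTransitionElementsPerEntry
  return kHeaderSize + entry * kElementsPerEntry + offset_within_entry;
}

int main() {
  assert(ProtoTransitionIndex(0, 0) == 2);   // prototype of entry 0
  assert(ProtoTransitionIndex(0, 1) == 3);   // target map of entry 0
  assert(ProtoTransitionIndex(4, 1) == 11);  // target map of entry 4
  return 0;
}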
@@ -4803,25 +4817,10 @@ class Map: public HeapObject {
   // Removes a code object from the code cache at the given index.
   void RemoveFromCodeCache(String* name, Code* code, int index);

-  // For every transition in this map, makes the transition's
-  // target's prototype pointer point back to this map.
-  // This is undone in MarkCompactCollector::ClearNonLiveTransitions().
-  void CreateBackPointers();
-
-  void CreateOneBackPointer(Object* transition_target);
-
-  // Set all map transitions from this map to dead maps to null.
-  // Also, restore the original prototype on the targets of these
-  // transitions, so that we do not process this map again while
-  // following back pointers.
-  void ClearNonLiveTransitions(Heap* heap, Object* real_prototype);
-
-  // Restore a possible back pointer in the prototype field of object.
-  // Return true in that case and false otherwise. Set *keep_entry to
-  // true when a live map transition has been found.
-  bool RestoreOneBackPointer(Object* object,
-                             Object* real_prototype,
-                             bool* keep_entry);
+  // Set all map transitions from this map to dead maps to null.  Also clear
+  // back pointers in transition targets so that we do not process this map
+  // again while following back pointers.
+  void ClearNonLiveTransitions(Heap* heap);

   // Computes a hash value for this map, to be used in HashTables and such.
   int Hash();
@@ -4903,16 +4902,17 @@ class Map: public HeapObject {
       kConstructorOffset + kPointerSize;
   static const int kCodeCacheOffset =
       kInstanceDescriptorsOrBitField3Offset + kPointerSize;
-  static const int kPrototypeTransitionsOffset =
+  static const int kPrototypeTransitionsOrBackPointerOffset =
       kCodeCacheOffset + kPointerSize;
-  static const int kPadStart = kPrototypeTransitionsOffset + kPointerSize;
+  static const int kPadStart =
+      kPrototypeTransitionsOrBackPointerOffset + kPointerSize;
   static const int kSize = MAP_POINTER_ALIGN(kPadStart);

   // Layout of pointer fields. Heap iteration code relies on them
   // being continuously allocated.
   static const int kPointerFieldsBeginOffset = Map::kPrototypeOffset;
   static const int kPointerFieldsEndOffset =
-      Map::kPrototypeTransitionsOffset + kPointerSize;
+      kPrototypeTransitionsOrBackPointerOffset + kPointerSize;

   // Byte offsets within kInstanceSizesOffset.
   static const int kInstanceSizeOffset = kInstanceSizesOffset + 0;
......
@@ -2146,10 +2146,16 @@ void V8HeapExplorer::ExtractMapReferences(HeapEntry* entry, Map* map) {
                          "descriptors", map->instance_descriptors(),
                          Map::kInstanceDescriptorsOrBitField3Offset);
   }
-  TagObject(map->prototype_transitions(), "(prototype transitions)");
-  SetInternalReference(map, entry,
-                       "prototype_transitions", map->prototype_transitions(),
-                       Map::kPrototypeTransitionsOffset);
+  if (map->unchecked_prototype_transitions()->IsFixedArray()) {
+    TagObject(map->prototype_transitions(), "(prototype transitions)");
+    SetInternalReference(map, entry,
+                         "prototype_transitions", map->prototype_transitions(),
+                         Map::kPrototypeTransitionsOrBackPointerOffset);
+  } else {
+    SetInternalReference(map, entry,
+                         "back_pointer", map->GetBackPointer(),
+                         Map::kPrototypeTransitionsOrBackPointerOffset);
+  }
   SetInternalReference(map, entry,
                        "code_cache", map->code_cache(),
                        Map::kCodeCacheOffset);
......