Integrate map marking into static marking visitor.

This refactors the specialized marking of map contents so that it is done by
the static marking visitor shared between full and incremental marking. It
also fixes an issue where some maps were not accounted for in the object
stats tracker. More importantly, it simplifies the code base.
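
A minimal, self-contained sketch of the static-dispatch pattern this change
relies on (illustrative names only, not the actual V8 sources): the shared
base visitor implements the map-visiting logic once and delegates the
collector-specific marking to the concrete visitor via its template
parameter.

#include <cstdio>

struct HeapObject { const char* name; };

// Shared visitor: map-visiting logic lives here exactly once.
template<typename StaticVisitor>
struct StaticMarkingVisitorSketch {
  static void VisitMap(HeapObject* map) {
    // Collector-specific marking is resolved at compile time.
    StaticVisitor::MarkObject(map);
  }
};

// Stand-in for the full (mark-compact) marking visitor.
struct FullMarkingVisitorSketch
    : StaticMarkingVisitorSketch<FullMarkingVisitorSketch> {
  static void MarkObject(HeapObject* obj) {
    std::printf("full GC: mark %s black and push it\n", obj->name);
  }
};

// Stand-in for the incremental marking visitor.
struct IncrementalMarkingVisitorSketch
    : StaticMarkingVisitorSketch<IncrementalMarkingVisitorSketch> {
  static void MarkObject(HeapObject* obj) {
    std::printf("incremental GC: mark %s grey and push it\n", obj->name);
  }
};

int main() {
  HeapObject map = { "some_map" };
  // The same VisitMap body serves both collectors.
  FullMarkingVisitorSketch::VisitMap(&map);
  IncrementalMarkingVisitorSketch::VisitMap(&map);
  return 0;
}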

R=verwaest@chromium.org

Review URL: https://codereview.chromium.org/10919294

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@12526 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 4bd4fb1a
@@ -1499,13 +1499,6 @@ class Heap {
void ClearNormalizedMapCaches();
// Clears the cache of ICs related to this map.
void ClearCacheOnMap(Map* map) {
if (FLAG_cleanup_code_caches_at_gc) {
map->ClearCodeCache(this);
}
}
GCTracer* tracer() { return tracer_; }
// Returns the size of objects residing in non new spaces.
@@ -125,27 +125,6 @@ void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit) {
}
bool IncrementalMarking::MarkObjectAndPush(HeapObject* obj) {
MarkBit mark_bit = Marking::MarkBitFrom(obj);
if (!mark_bit.Get()) {
WhiteToGreyAndPush(obj, mark_bit);
return true;
}
return false;
}
bool IncrementalMarking::MarkObjectWithoutPush(HeapObject* obj) {
MarkBit mark_bit = Marking::MarkBitFrom(obj);
if (!mark_bit.Get()) {
mark_bit.Set();
MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
return true;
}
return false;
}
} } // namespace v8::internal
#endif // V8_INCREMENTAL_MARKING_INL_H_
@@ -44,7 +44,6 @@ IncrementalMarking::IncrementalMarking(Heap* heap)
state_(STOPPED),
marking_deque_memory_(NULL),
marking_deque_memory_committed_(false),
marker_(this, heap->mark_compact_collector()),
steps_count_(0),
steps_took_(0),
longest_step_(0.0),
@@ -226,6 +225,7 @@ class IncrementalMarkingMarkingVisitor
}
}
// Marks the object grey and pushes it on the marking stack.
INLINE(static void MarkObject(Heap* heap, Object* obj)) {
HeapObject* heap_object = HeapObject::cast(obj);
MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
@@ -238,6 +238,20 @@ class IncrementalMarkingMarkingVisitor
heap->incremental_marking()->WhiteToGreyAndPush(heap_object, mark_bit);
}
}
// Marks the object black without pushing it on the marking stack.
// Returns true if object needed marking and false otherwise.
INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
HeapObject* heap_object = HeapObject::cast(obj);
MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
if (Marking::IsWhite(mark_bit)) {
mark_bit.Set();
MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(),
heap_object->Size());
return true;
}
return false;
}
};
@@ -641,23 +655,6 @@ void IncrementalMarking::Hurry() {
} else if (map == native_context_map) {
// Native contexts have weak fields.
IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj);
} else if (map->instance_type() == MAP_TYPE) {
Map* map = Map::cast(obj);
heap_->ClearCacheOnMap(map);
// When map collection is enabled we have to mark through map's
// transitions and back pointers in a special way to make these links
// weak. Only maps for subclasses of JSReceiver can have transitions.
STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
if (FLAG_collect_maps &&
map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
marker_.MarkMapContents(map);
} else {
IncrementalMarkingMarkingVisitor::VisitPointers(
heap_,
HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}
} else {
MarkBit map_mark_bit = Marking::MarkBitFrom(map);
if (Marking::IsWhite(map_mark_bit)) {
@@ -822,23 +819,6 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
IncrementalMarkingMarkingVisitor::VisitNativeContext(map, ctx);
} else if (map->instance_type() == MAP_TYPE) {
Map* map = Map::cast(obj);
heap_->ClearCacheOnMap(map);
// When map collection is enabled we have to mark through map's
// transitions and back pointers in a special way to make these links
// weak. Only maps for subclasses of JSReceiver can have transitions.
STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
if (FLAG_collect_maps &&
map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
marker_.MarkMapContents(map);
} else {
IncrementalMarkingMarkingVisitor::VisitPointers(
heap_,
HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}
} else {
IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
}
@@ -175,16 +175,6 @@ class IncrementalMarking {
return true;
}
// Marks the object grey and pushes it on the marking stack.
// Returns true if object needed marking and false otherwise.
// This is for incremental marking only.
INLINE(bool MarkObjectAndPush(HeapObject* obj));
// Marks the object black without pushing it on the marking stack.
// Returns true if object needed marking and false otherwise.
// This is for incremental marking only.
INLINE(bool MarkObjectWithoutPush(HeapObject* obj));
inline int steps_count() {
return steps_count_;
}
@@ -275,7 +265,6 @@ class IncrementalMarking {
VirtualMemory* marking_deque_memory_;
bool marking_deque_memory_committed_;
MarkingDeque marking_deque_;
Marker<IncrementalMarking> marker_;
int steps_count_;
double steps_took_;
@@ -52,32 +52,15 @@ void MarkCompactCollector::SetFlags(int flags) {
}
bool MarkCompactCollector::MarkObjectAndPush(HeapObject* obj) {
if (MarkObjectWithoutPush(obj)) {
marking_deque_.PushBlack(obj);
return true;
}
return false;
}
void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) {
ASSERT(Marking::MarkBitFrom(obj) == mark_bit);
if (!mark_bit.Get()) {
mark_bit.Set();
MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
ProcessNewlyMarkedObject(obj);
}
}
bool MarkCompactCollector::MarkObjectWithoutPush(HeapObject* obj) {
MarkBit mark_bit = Marking::MarkBitFrom(obj);
if (!mark_bit.Get()) {
SetMark(obj, mark_bit);
return true;
ASSERT(IsMarked(obj));
ASSERT(HEAP->Contains(obj));
marking_deque_.PushBlack(obj);
}
return false;
}
@@ -86,9 +69,6 @@ void MarkCompactCollector::SetMark(HeapObject* obj, MarkBit mark_bit) {
ASSERT(Marking::MarkBitFrom(obj) == mark_bit);
mark_bit.Set();
MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
if (obj->IsMap()) {
heap_->ClearCacheOnMap(Map::cast(obj));
}
}
@@ -68,8 +68,7 @@ MarkCompactCollector::MarkCompactCollector() : // NOLINT
migration_slots_buffer_(NULL),
heap_(NULL),
code_flusher_(NULL),
encountered_weak_maps_(NULL),
marker_(this, this) { }
encountered_weak_maps_(NULL) { }
#ifdef DEBUG
@@ -1067,11 +1066,23 @@ class MarkCompactMarkingVisitor
}
}
// Marks the object black and pushes it on the marking stack.
INLINE(static void MarkObject(Heap* heap, HeapObject* object)) {
MarkBit mark = Marking::MarkBitFrom(object);
heap->mark_compact_collector()->MarkObject(object, mark);
}
// Marks the object black without pushing it on the marking stack.
// Returns true if object needed marking and false otherwise.
INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) {
MarkBit mark_bit = Marking::MarkBitFrom(object);
if (!mark_bit.Get()) {
heap->mark_compact_collector()->SetMark(object, mark_bit);
return true;
}
return false;
}
// Mark object pointed to by p.
INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector,
Object** anchor_slot,
@@ -1879,97 +1890,6 @@ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
};
void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
ASSERT(IsMarked(object));
ASSERT(HEAP->Contains(object));
if (object->IsMap()) {
Map* map = Map::cast(object);
heap_->ClearCacheOnMap(map);
// When map collection is enabled we have to mark through map's transitions
// in a special way to make transition links weak. Only maps for subclasses
// of JSReceiver can have transitions.
STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
if (FLAG_collect_maps && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
marker_.MarkMapContents(map);
} else {
marking_deque_.PushBlack(map);
}
} else {
marking_deque_.PushBlack(object);
}
}
// Force instantiation of template instances.
template void Marker<IncrementalMarking>::MarkMapContents(Map* map);
template void Marker<MarkCompactCollector>::MarkMapContents(Map* map);
template <class T>
void Marker<T>::MarkMapContents(Map* map) {
// Make sure that the back pointer stored either in the map itself or inside
// its transitions array is marked. Treat pointers in the transitions array as
// weak and also mark that array to prevent visiting it later.
base_marker()->MarkObjectAndPush(HeapObject::cast(map->GetBackPointer()));
Object** transitions_slot =
HeapObject::RawField(map, Map::kTransitionsOrBackPointerOffset);
Object* transitions = *transitions_slot;
if (transitions->IsTransitionArray()) {
MarkTransitionArray(reinterpret_cast<TransitionArray*>(transitions));
} else {
// Already marked by marking map->GetBackPointer().
ASSERT(transitions->IsMap() || transitions->IsUndefined());
}
// Mark the Object* fields of the Map. Since the transitions array has been
// marked already, it is fine that one of these fields contains a pointer to
// it.
Object** start_slot =
HeapObject::RawField(map, Map::kPointerFieldsBeginOffset);
Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
for (Object** slot = start_slot; slot < end_slot; slot++) {
Object* obj = *slot;
if (!obj->NonFailureIsHeapObject()) continue;
mark_compact_collector()->RecordSlot(start_slot, slot, obj);
base_marker()->MarkObjectAndPush(reinterpret_cast<HeapObject*>(obj));
}
}
template <class T>
void Marker<T>::MarkTransitionArray(TransitionArray* transitions) {
if (!base_marker()->MarkObjectWithoutPush(transitions)) return;
Object** transitions_start = transitions->data_start();
// We don't have to record the descriptors_pointer slot since the cell space
// is not compacted.
JSGlobalPropertyCell* descriptors_cell = transitions->descriptors_pointer();
base_marker()->MarkObjectAndPush(descriptors_cell);
if (transitions->HasPrototypeTransitions()) {
// Mark prototype transitions array but don't push it into marking stack.
// This will make references from it weak. We will clean dead prototype
// transitions in ClearNonLiveTransitions.
Object** proto_trans_slot = transitions->GetPrototypeTransitionsSlot();
HeapObject* prototype_transitions = HeapObject::cast(*proto_trans_slot);
base_marker()->MarkObjectWithoutPush(prototype_transitions);
mark_compact_collector()->RecordSlot(
transitions_start, proto_trans_slot, prototype_transitions);
}
for (int i = 0; i < transitions->number_of_transitions(); ++i) {
Object** key_slot = transitions->GetKeySlot(i);
Object* key = *key_slot;
if (key->IsHeapObject()) {
base_marker()->MarkObjectAndPush(HeapObject::cast(key));
mark_compact_collector()->RecordSlot(transitions_start, key_slot, key);
}
}
}
// Fill the marking stack with overflowed objects returned by the given
// iterator. Stop when the marking stack is filled or the end of the space
// is reached, whichever comes first.
@@ -403,33 +403,6 @@ class SlotsBuffer {
};
// -------------------------------------------------------------------------
// Marker shared between incremental and non-incremental marking
template<class BaseMarker> class Marker {
public:
Marker(BaseMarker* base_marker, MarkCompactCollector* mark_compact_collector)
: base_marker_(base_marker),
mark_compact_collector_(mark_compact_collector) {}
// Mark pointers in a Map and its DescriptorArray together, possibly
// treating transitions or back pointers weak.
void MarkMapContents(Map* map);
void MarkTransitionArray(TransitionArray* transitions);
private:
BaseMarker* base_marker() {
return base_marker_;
}
MarkCompactCollector* mark_compact_collector() {
return mark_compact_collector_;
}
BaseMarker* base_marker_;
MarkCompactCollector* mark_compact_collector_;
};
// Defined in isolate.h.
class ThreadLocalTop;
@@ -656,8 +629,6 @@ class MarkCompactCollector {
friend class MarkCompactMarkingVisitor;
friend class CodeMarkingVisitor;
friend class SharedFunctionInfoMarkingVisitor;
friend class Marker<IncrementalMarking>;
friend class Marker<MarkCompactCollector>;
// Mark non-optimize code for functions inlined into the given optimized
// code. This will prevent it from being flushed.
@@ -674,26 +645,14 @@ class MarkCompactCollector {
void AfterMarking();
// Marks the object black and pushes it on the marking stack.
// Returns true if object needed marking and false otherwise.
// This is for non-incremental marking only.
INLINE(bool MarkObjectAndPush(HeapObject* obj));
// Marks the object black and pushes it on the marking stack.
// This is for non-incremental marking only.
INLINE(void MarkObject(HeapObject* obj, MarkBit mark_bit));
// Marks the object black without pushing it on the marking stack.
// Returns true if object needed marking and false otherwise.
// This is for non-incremental marking only.
INLINE(bool MarkObjectWithoutPush(HeapObject* obj));
// Marks the object black assuming that it is not yet marked.
// This is for non-incremental marking only.
INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit));
void ProcessNewlyMarkedObject(HeapObject* obj);
// Mark the heap roots and all objects reachable from them.
void MarkRoots(RootMarkingVisitor* visitor);
@@ -796,7 +755,6 @@ class MarkCompactCollector {
MarkingDeque marking_deque_;
CodeFlusher* code_flusher_;
Object* encountered_weak_maps_;
Marker<MarkCompactCollector> marker_;
List<Page*> evacuation_candidates_;
List<Code*> invalidated_code_;
@@ -134,12 +134,9 @@ void StaticMarkingVisitor<StaticVisitor>::Initialize() {
Oddball::BodyDescriptor,
void>::Visit);
table_.Register(kVisitMap,
&FixedBodyVisitor<StaticVisitor,
Map::BodyDescriptor,
void>::Visit);
table_.Register(kVisitMap, &VisitMap);
table_.Register(kVisitCode, &StaticVisitor::VisitCode);
table_.Register(kVisitCode, &VisitCode);
// Registration for kVisitSharedFunctionInfo is done by StaticVisitor.
@@ -246,6 +243,32 @@ void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
}
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
Map* map_object = Map::cast(object);
// Clears the cache of ICs related to this map.
if (FLAG_cleanup_code_caches_at_gc) {
map_object->ClearCodeCache(heap);
}
// When map collection is enabled we have to mark through map's
// transitions and back pointers in a special way to make these links
// weak. Only maps for subclasses of JSReceiver can have transitions.
STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
if (FLAG_collect_maps &&
map_object->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
MarkMapContents(heap, map_object);
} else {
StaticVisitor::VisitPointers(heap,
HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
}
}
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(
Map* map, HeapObject* object) {
@@ -269,6 +292,60 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
}
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
Heap* heap, Map* map) {
// Make sure that the back pointer stored either in the map itself or
// inside its transitions array is marked. Skip recording the back
// pointer slot since map space is not compacted.
StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));
// Treat pointers in the transitions array as weak and also mark that
// array to prevent visiting it later. Skip recording the transition
// array slot, since it will be implicitly recorded when the pointer
// fields of this map are visited.
TransitionArray* transitions = map->unchecked_transition_array();
if (transitions->IsTransitionArray()) {
MarkTransitionArray(heap, transitions);
} else {
// Already marked by marking map->GetBackPointer() above.
ASSERT(transitions->IsMap() || transitions->IsUndefined());
}
// Mark the pointer fields of the Map. Since the transitions array has
// been marked already, it is fine that one of these fields contains a
// pointer to it.
StaticVisitor::VisitPointers(heap,
HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
Heap* heap, TransitionArray* transitions) {
if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;
// Skip recording the descriptors_pointer slot since the cell space
// is not compacted and descriptors are referenced through a cell.
StaticVisitor::MarkObject(heap, transitions->descriptors_pointer());
if (transitions->HasPrototypeTransitions()) {
// Mark prototype transitions array but do not push it onto marking
// stack, this will make references from it weak. We will clean dead
// prototype transitions in ClearNonLiveTransitions.
Object** slot = transitions->GetPrototypeTransitionsSlot();
HeapObject* obj = HeapObject::cast(*slot);
heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
StaticVisitor::MarkObjectWithoutPush(heap, obj);
}
for (int i = 0; i < transitions->number_of_transitions(); ++i) {
StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
}
}
void Code::CodeIterateBody(ObjectVisitor* v) {
int mode_mask = RelocInfo::kCodeTargetMask |
RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
@@ -398,9 +398,15 @@ class StaticMarkingVisitor : public StaticVisitorBase {
static inline void VisitNativeContext(Map* map, HeapObject* object);
protected:
static inline void VisitMap(Map* map, HeapObject* object);
static inline void VisitCode(Map* map, HeapObject* object);
static inline void VisitJSRegExp(Map* map, HeapObject* object);
// Mark pointers in a Map and its TransitionArray together, possibly
// treating transitions or back pointers weak.
static void MarkMapContents(Heap* heap, Map* map);
static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);
class DataObjectVisitor {
public:
template<int size>