Commit 870eba4c authored by vegorov@chromium.org

Process weak references between optimized JSFunctions on scavenges.

R=mstarzinger@chromium.org

Review URL: https://chromiumcodereview.appspot.com/10091027

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@11347 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 4f5d3361
@@ -1124,6 +1124,27 @@ void PromotionQueue::RelocateQueueHead() {
 }
 
 
+class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
+ public:
+  explicit ScavengeWeakObjectRetainer(Heap* heap) : heap_(heap) { }
+
+  virtual Object* RetainAs(Object* object) {
+    if (!heap_->InFromSpace(object)) {
+      return object;
+    }
+
+    MapWord map_word = HeapObject::cast(object)->map_word();
+    if (map_word.IsForwardingAddress()) {
+      return map_word.ToForwardingAddress();
+    }
+    return NULL;
+  }
+
+ private:
+  Heap* heap_;
+};
+
+
 void Heap::Scavenge() {
 #ifdef DEBUG
   if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
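
The ScavengeWeakObjectRetainer above follows the WeakObjectRetainer contract: RetainAs returns the object itself if it survived in place, its forwarding address if the scavenge moved it, or NULL if it is dead and should be unlinked. As a rough standalone illustration of how such a retainer drives a weak-list walk (this is not V8 code; Node, WeakRetainer, and ProcessWeakList are invented names):

    // Toy model of the retainer pattern: "moved" objects carry a forwarding
    // pointer, dead ones a flag; the walk splices dead nodes out and follows
    // forwarding pointers for survivors.
    #include <cstdio>

    struct Node {
      int id;
      Node* next_link;     // weak link, like JSFunction's next_function_link
      Node* forwarded_to;  // non-NULL once the collector has moved the node
      bool dead;           // true if the collector discarded the node
    };

    struct WeakRetainer {
      // Mirrors the RetainAs contract: keep, follow the forwarding
      // address, or drop (NULL).
      Node* RetainAs(Node* n) {
        if (n->dead) return NULL;
        return (n->forwarded_to != NULL) ? n->forwarded_to : n;
      }
    };

    Node* ProcessWeakList(Node* head, WeakRetainer* retainer) {
      Node* new_head = NULL;
      Node* tail = NULL;
      for (Node* candidate = head; candidate != NULL;
           candidate = candidate->next_link) {
        Node* retained = retainer->RetainAs(candidate);
        if (retained == NULL) continue;  // dead: splice out of the list
        if (tail == NULL) {
          new_head = retained;           // first survivor becomes the head
        } else {
          tail->next_link = retained;    // relink the previous survivor
        }
        tail = retained;
      }
      if (tail != NULL) tail->next_link = NULL;
      return new_head;
    }

    int main() {
      Node b_copy = {2, NULL, NULL, false};  // where the collector moved b
      Node c = {3, NULL, NULL, false};
      Node b = {2, &c, &b_copy, false};
      Node a = {1, &b, NULL, true};          // a died in this cycle
      WeakRetainer retainer;
      for (Node* n = ProcessWeakList(&a, &retainer); n != NULL;
           n = n->next_link) {
        printf("%d\n", n->id);               // prints 2, then 3
      }
      return 0;
    }
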
@@ -1222,6 +1243,9 @@ void Heap::Scavenge() {
   }
 
   incremental_marking()->UpdateMarkingDequeAfterScavenge();
 
+  ScavengeWeakObjectRetainer weak_object_retainer(this);
+  ProcessWeakReferences(&weak_object_retainer);
+
   ASSERT(new_space_front == new_space_.top());
 
   // Set age mark.
@@ -1308,7 +1332,8 @@ void Heap::UpdateReferencesInExternalStringTable(
 static Object* ProcessFunctionWeakReferences(Heap* heap,
                                              Object* function,
-                                             WeakObjectRetainer* retainer) {
+                                             WeakObjectRetainer* retainer,
+                                             bool record_slots) {
   Object* undefined = heap->undefined_value();
   Object* head = undefined;
   JSFunction* tail = NULL;
@@ -1325,6 +1350,12 @@ static Object* ProcessFunctionWeakReferences(Heap* heap,
       // Subsequent elements in the list.
       ASSERT(tail != NULL);
       tail->set_next_function_link(retain);
+      if (record_slots) {
+        Object** next_function =
+            HeapObject::RawField(tail, JSFunction::kNextFunctionLinkOffset);
+        heap->mark_compact_collector()->RecordSlot(
+            next_function, next_function, retain);
+      }
     }
     // Retained function is new tail.
     candidate_function = reinterpret_cast<JSFunction*>(retain);
@@ -1353,6 +1384,15 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
   Object* head = undefined;
   Context* tail = NULL;
   Object* candidate = global_contexts_list_;
+
+  // We don't record weak slots during marking or scavenges.
+  // Instead we do it once when we complete the mark-compact cycle.
+  // Note that the write barrier has no effect if we are already in the
+  // middle of a compacting mark-sweep cycle, so we have to record slots
+  // manually.
+  bool record_slots =
+      gc_state() == MARK_COMPACT &&
+      mark_compact_collector()->is_compacting();
+
   while (candidate != undefined) {
     // Check whether to keep the candidate in the list.
     Context* candidate_context = reinterpret_cast<Context*>(candidate);
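
The record_slots flag compensates for the write barrier being inert while gc_state() is MARK_COMPACT: when these weak lists are rewritten in the middle of a compacting cycle, slots that now point at evacuation candidates must be handed to the collector explicitly, or they would never be rewritten after evacuation. A loose standalone illustration of that fix-up idea (not V8's implementation; SlotsBuffer and the field names here are invented):

    // Toy model: during compaction, interesting slots are recorded, and a
    // post-evacuation pass rewrites every recorded slot whose target moved.
    #include <cstdio>
    #include <vector>

    struct Obj {
      int id;
      Obj* forwarded;  // set when the "compactor" relocates the object
    };

    struct SlotsBuffer {
      std::vector<Obj**> slots;
      // Loosely analogous in spirit to MarkCompactCollector::RecordSlot.
      void Record(Obj** slot) { slots.push_back(slot); }
      void UpdateAfterEvacuation() {
        for (size_t i = 0; i < slots.size(); i++) {
          Obj** slot = slots[i];
          if (*slot != NULL && (*slot)->forwarded != NULL) {
            *slot = (*slot)->forwarded;  // point at the new location
          }
        }
      }
    };

    int main() {
      Obj old_target = {7, NULL};
      Obj* field = &old_target;            // a slot written during the cycle
      SlotsBuffer buffer;
      buffer.Record(&field);               // what record_slots enables above
      Obj new_target = {7, NULL};
      old_target.forwarded = &new_target;  // evacuation moves the object
      buffer.UpdateAfterEvacuation();
      printf("%s\n", field == &new_target ? "slot updated" : "stale slot");
      return 0;
    }
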
@@ -1368,6 +1408,14 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
                             Context::NEXT_CONTEXT_LINK,
                             retain,
                             UPDATE_WRITE_BARRIER);
+        if (record_slots) {
+          Object** next_context =
+              HeapObject::RawField(
+                  tail, FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK));
+          mark_compact_collector()->RecordSlot(
+              next_context, next_context, retain);
+        }
+
       }
       // Retained context is new tail.
       candidate_context = reinterpret_cast<Context*>(retain);
@@ -1380,11 +1428,19 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
           ProcessFunctionWeakReferences(
               this,
               candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST),
-              retainer);
+              retainer,
+              record_slots);
       candidate_context->set_unchecked(this,
                                        Context::OPTIMIZED_FUNCTIONS_LIST,
                                        function_list_head,
                                        UPDATE_WRITE_BARRIER);
+      if (record_slots) {
+        Object** optimized_functions =
+            HeapObject::RawField(
+                tail, FixedArray::SizeFor(Context::OPTIMIZED_FUNCTIONS_LIST));
+        mark_compact_collector()->RecordSlot(
+            optimized_functions, optimized_functions, function_list_head);
+      }
     }
 
     // Move to next element in the list.
@@ -1678,7 +1734,12 @@ class ScavengingVisitor : public StaticVisitorBase {
         MigrateObject(heap, object, target, object_size);
 
         if (object_contents == POINTER_OBJECT) {
-          heap->promotion_queue()->insert(target, object_size);
+          if (map->instance_type() == JS_FUNCTION_TYPE) {
+            heap->promotion_queue()->insert(
+                target, JSFunction::kNonWeakFieldsEndOffset);
+          } else {
+            heap->promotion_queue()->insert(target, object_size);
+          }
         }
 
         heap->tracer()->increment_promoted_objects_size(object_size);
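
The promotion-queue change above relies on the queue storing an explicit size with each promoted object: the scavenger later visits pointer fields only up to that size, so enqueueing a JSFunction with JSFunction::kNonWeakFieldsEndOffset instead of its full size keeps this pass away from the weak next-function link, which ProcessWeakReferences handles instead. A small sketch of that size-clamping trick (illustrative only; none of these types are V8's):

    // Toy model: pointer fields are visited only up to the recorded size,
    // so a deliberately short size hides the trailing (weak) fields.
    #include <cstdio>
    #include <utility>
    #include <vector>

    const size_t kSlotSize = sizeof(void*);
    const size_t kStrongSlots = 3;  // stand-in for kNonWeakFieldsEndOffset
    const size_t kTotalSlots = 4;   // the last slot is the weak link

    struct FakeFunction {
      void* slots[kTotalSlots];
    };

    typedef std::pair<FakeFunction*, size_t> Entry;  // (object, byte size)

    void DrainPromotionQueue(std::vector<Entry>* queue) {
      while (!queue->empty()) {
        Entry entry = queue->back();
        queue->pop_back();
        size_t limit = entry.second / kSlotSize;
        for (size_t i = 0; i < limit; i++) {
          void** slot = &entry.first->slots[i];
          printf("visiting slot %zu (value %p)\n", i, *slot);
        }
      }
    }

    int main() {
      FakeFunction f = {{NULL, NULL, NULL, NULL}};
      std::vector<Entry> queue;
      // Like the diff: enqueue with the end offset of the strong fields,
      // not the object's full size, so the weak slot is never visited.
      queue.push_back(Entry(&f, kStrongSlots * kSlotSize));
      DrainPromotionQueue(&queue);
      return 0;
    }
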
@@ -830,6 +830,19 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
       MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
 
       VisitGlobalContext(ctx, &marking_visitor);
+    } else if (map->instance_type() == JS_FUNCTION_TYPE) {
+      marking_visitor.VisitPointers(
+          HeapObject::RawField(obj, JSFunction::kPropertiesOffset),
+          HeapObject::RawField(obj, JSFunction::kCodeEntryOffset));
+
+      marking_visitor.VisitCodeEntry(
+          obj->address() + JSFunction::kCodeEntryOffset);
+
+      marking_visitor.VisitPointers(
+          HeapObject::RawField(obj,
+                               JSFunction::kCodeEntryOffset + kPointerSize),
+          HeapObject::RawField(obj,
+                               JSFunction::kNonWeakFieldsEndOffset));
     } else {
       obj->IterateBody(map->instance_type(), size, &marking_visitor);
     }
@@ -1525,12 +1525,6 @@ class StaticMarkingVisitor : public StaticVisitorBase {
                                        JSFunction::kCodeEntryOffset + kPointerSize),
                   HeapObject::RawField(object,
                                        JSFunction::kNonWeakFieldsEndOffset));
-
-    // Don't visit the next function list field as it is a weak reference.
-    Object** next_function =
-        HeapObject::RawField(object, JSFunction::kNextFunctionLinkOffset);
-    heap->mark_compact_collector()->RecordSlot(
-        next_function, next_function, *next_function);
   }
 
   static inline void VisitJSRegExpFields(Map* map,
@@ -544,6 +544,8 @@ class MarkCompactCollector {
   void ClearMarkbits();
 
+  bool is_compacting() const { return compacting_; }
+
  private:
   MarkCompactCollector();
   ~MarkCompactCollector();
@@ -72,9 +72,7 @@ void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
   table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);
 
-  table_.Register(kVisitJSFunction,
-                  &JSObjectVisitor::
-                      template VisitSpecialized<JSFunction::kSize>);
+  table_.Register(kVisitJSFunction, &VisitJSFunction);
 
   table_.Register(kVisitFreeSpace, &VisitFreeSpace);
@@ -289,6 +289,23 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
   }
 
  private:
+  static inline int VisitJSFunction(Map* map, HeapObject* object) {
+    Heap* heap = map->GetHeap();
+    VisitPointers(heap,
+                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
+                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
+
+    // Don't visit the code entry; this visitor is used only during scavenges.
+    VisitPointers(
+        heap,
+        HeapObject::RawField(object,
+                             JSFunction::kCodeEntryOffset + kPointerSize),
+        HeapObject::RawField(object,
+                             JSFunction::kNonWeakFieldsEndOffset));
+    return JSFunction::kSize;
+  }
+
   static inline int VisitByteArray(Map* map, HeapObject* object) {
     return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
   }
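
Both the incremental-marking branch and the new VisitJSFunction above use the same trick: instead of one VisitPointers call over the whole object, the JSFunction body is visited as two ranges bracketing the code entry, and both ranges stop at kNonWeakFieldsEndOffset so the weak next-function link is never treated as a strong reference. A schematic of that range split (the offsets below are invented for illustration, not V8's real layout):

    // Toy layout: [properties .. code_entry) and (code_entry .. non_weak_end)
    // are visited as strong ranges; the code entry gets special handling and
    // the weak tail is skipped entirely.
    #include <cstdio>

    const int kPropertiesOffset = 0;  // start of the strong fields
    const int kCodeEntryOffset = 2;   // needs VisitCodeEntry-style treatment
    const int kNonWeakFieldsEnd = 5;  // the weak next-function link follows
    const int kSizeInSlots = 6;

    void VisitPointers(int start_slot, int end_slot) {
      for (int i = start_slot; i < end_slot; i++) {
        printf("strong slot %d\n", i);
      }
    }

    int main() {
      // First strong range: everything before the code entry.
      VisitPointers(kPropertiesOffset, kCodeEntryOffset);
      printf("code entry handled specially (or skipped during scavenges)\n");
      // Second strong range: after the code entry, up to the weak fields.
      VisitPointers(kCodeEntryOffset + 1, kNonWeakFieldsEnd);
      printf("slots %d..%d are weak and skipped\n",
             kNonWeakFieldsEnd, kSizeInSlots - 1);
      return 0;
    }
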