Move code flushing support into shared visitor.

This is a first step towards incremental code flushing. The code
flushing support is now shared between full and incremental marking.
The code flusher itself is not yet activated in incremental mode and
will require some additional adaptations.

R=ulan@chromium.org
BUG=v8:1609

Review URL: https://codereview.chromium.org/11028016

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@12714 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent b3c0ed82
......@@ -181,10 +181,6 @@ class IncrementalMarkingMarkingVisitor
// Installs this visitor's dispatch table: first inherit the default
// visitors from the shared static marking visitor, then override the
// entries that incremental marking handles specially.
static void Initialize() {
// Registers the base-class defaults for all instance types.
StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
table_.Register(kVisitJSFunction, &VisitJSFunction);
table_.Register(kVisitJSRegExp, &VisitJSRegExp);
}
......@@ -195,31 +191,7 @@ class IncrementalMarkingMarkingVisitor
HeapObject::RawField(object, JSWeakMap::kSize));
}
// Visits a SharedFunctionInfo during incremental marking. If the object's
// IC age is stale relative to the heap-global IC age, context-dependent
// state is reset first; the body is then marked with the fixed-body
// visitor for SharedFunctionInfo's layout.
static void VisitSharedFunctionInfo(Map* map, HeapObject* object) {
  SharedFunctionInfo* shared_info = SharedFunctionInfo::cast(object);
  int global_ic_age = map->GetHeap()->global_ic_age();
  if (shared_info->ic_age() != global_ic_age) {
    shared_info->ResetForNewContext(global_ic_age);
  }
  typedef FixedBodyVisitor<IncrementalMarkingMarkingVisitor,
                           SharedFunctionInfo::BodyDescriptor,
                           void> BodyVisitor;
  BodyVisitor::Visit(map, object);
}
// Visits a JSFunction during incremental marking. The body is covered in
// three pieces because the code entry slot needs dedicated handling and
// the fields past kNonWeakFieldsEndOffset are treated as weak.
static inline void VisitJSFunction(Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
// Iterate over all fields in the body but take care in dealing with
// the code entry and skip weak fields.
// 1. Strong fields preceding the code entry.
VisitPointers(heap,
HeapObject::RawField(object, JSFunction::kPropertiesOffset),
HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
// 2. The code entry slot itself — visited via the dedicated helper
// rather than as an ordinary tagged pointer.
VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
// 3. Strong fields after the code entry, stopping at
// kNonWeakFieldsEndOffset so the trailing weak fields are skipped.
VisitPointers(heap,
HeapObject::RawField(object,
JSFunction::kCodeEntryOffset + kPointerSize),
HeapObject::RawField(object,
JSFunction::kNonWeakFieldsEndOffset));
}
// Intentionally a no-op: presumably a hook the shared visitor calls before
// visiting a SharedFunctionInfo, which incremental marking does not need —
// TODO confirm against StaticMarkingVisitor.
static void BeforeVisitingSharedFunctionInfo(HeapObject* object) {}
INLINE(static void VisitPointer(Heap* heap, Object** p)) {
Object* obj = *p;
......
This diff is collapsed.
......@@ -403,6 +403,81 @@ class SlotsBuffer {
};
// CodeFlusher collects candidates for code flushing during marking and
// processes those candidates after marking has completed in order to
// reset those functions referencing code objects that would otherwise
// be unreachable. Code objects can be referenced in two ways:
//   - SharedFunctionInfo references unoptimized code.
//   - JSFunction references either unoptimized or optimized code.
// We are not allowed to flush unoptimized code for functions that got
// optimized or inlined into optimized code, because we might bailout
// into the unoptimized code again during deoptimization.
//
// Both candidate lists are intrusive singly-linked lists: no memory is
// allocated for links. A JSFunction's "next" pointer is stored in its own
// code entry slot, and a SharedFunctionInfo's "next" pointer is stored in
// the gc_metadata slot of its code object (see the Get/SetNextCandidate
// helpers below).
class CodeFlusher {
 public:
  explicit CodeFlusher(Isolate* isolate)
      : isolate_(isolate),
        jsfunction_candidates_head_(NULL),
        shared_function_info_candidates_head_(NULL) {}

  // Prepends |shared_info| to the SharedFunctionInfo candidate list.
  // The link is written into the code object's gc_metadata slot.
  void AddCandidate(SharedFunctionInfo* shared_info) {
    SetNextCandidate(shared_info, shared_function_info_candidates_head_);
    shared_function_info_candidates_head_ = shared_info;
  }

  // Prepends |function| to the JSFunction candidate list. The link reuses
  // the function's code entry slot, which is only valid while the
  // function's code still equals its shared info's code (hence the ASSERT).
  void AddCandidate(JSFunction* function) {
    ASSERT(function->code() == function->shared()->code());
    SetNextCandidate(function, jsfunction_candidates_head_);
    jsfunction_candidates_head_ = function;
  }

  // Walks both candidate lists after marking has completed; the actual
  // flushing logic lives in the .cc file.
  void ProcessCandidates() {
    ProcessSharedFunctionInfoCandidates();
    ProcessJSFunctionCandidates();
  }

 private:
  void ProcessJSFunctionCandidates();
  void ProcessSharedFunctionInfoCandidates();

  // Next-candidate link of a JSFunction lives in its code entry slot,
  // clobbering the code entry until candidates are processed — the
  // processing code presumably restores a valid code entry (see .cc).
  static JSFunction** GetNextCandidateField(JSFunction* candidate) {
    return reinterpret_cast<JSFunction**>(
        candidate->address() + JSFunction::kCodeEntryOffset);
  }

  static JSFunction* GetNextCandidate(JSFunction* candidate) {
    return *GetNextCandidateField(candidate);
  }

  static void SetNextCandidate(JSFunction* candidate,
                               JSFunction* next_candidate) {
    *GetNextCandidateField(candidate) = next_candidate;
  }

  // Next-candidate link of a SharedFunctionInfo lives in the gc_metadata
  // slot of its code object.
  static SharedFunctionInfo** GetNextCandidateField(
      SharedFunctionInfo* candidate) {
    Code* code = candidate->code();
    return reinterpret_cast<SharedFunctionInfo**>(
        code->address() + Code::kGCMetadataOffset);
  }

  static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
    return reinterpret_cast<SharedFunctionInfo*>(
        candidate->code()->gc_metadata());
  }

  static void SetNextCandidate(SharedFunctionInfo* candidate,
                               SharedFunctionInfo* next_candidate) {
    candidate->code()->set_gc_metadata(next_candidate);
  }

  Isolate* isolate_;
  // Heads of the two intrusive candidate lists; NULL-terminated.
  JSFunction* jsfunction_candidates_head_;
  SharedFunctionInfo* shared_function_info_candidates_head_;

  DISALLOW_COPY_AND_ASSIGN(CodeFlusher);
};
// Defined in isolate.h.
class ThreadLocalTop;
......@@ -631,10 +706,6 @@ class MarkCompactCollector {
friend class CodeMarkingVisitor;
friend class SharedFunctionInfoMarkingVisitor;
// Mark non-optimized code for functions inlined into the given optimized
// code. This will prevent it from being flushed.
void MarkInlinedFunctionsCode(Code* code);
// Mark code objects that are active on the stack to prevent them
// from being flushed.
void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top);
......
This diff is collapsed.
......@@ -397,9 +397,16 @@ class StaticMarkingVisitor : public StaticVisitorBase {
// TODO(mstarzinger): This should be made protected once refactoring is done.
static inline void VisitNativeContext(Map* map, HeapObject* object);
// TODO(mstarzinger): This should be made protected once refactoring is done.
// Mark non-optimized code for functions inlined into the given optimized
// code. This will prevent it from being flushed.
static void MarkInlinedFunctionsCode(Heap* heap, Code* code);
protected:
static inline void VisitMap(Map* map, HeapObject* object);
static inline void VisitCode(Map* map, HeapObject* object);
static inline void VisitSharedFunctionInfo(Map* map, HeapObject* object);
static inline void VisitJSFunction(Map* map, HeapObject* object);
static inline void VisitJSRegExp(Map* map, HeapObject* object);
// Mark pointers in a Map and its TransitionArray together, possibly
......@@ -407,6 +414,17 @@ class StaticMarkingVisitor : public StaticVisitorBase {
static void MarkMapContents(Heap* heap, Map* map);
static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);
// Code flushing support.
static inline bool IsFlushable(Heap* heap, JSFunction* function);
static inline bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info);
// Helpers used by code flushing support that visit pointer fields and treat
// references to code objects either strongly or weakly.
static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);
class DataObjectVisitor {
public:
template<int size>
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment