Commit 9ed501d9 authored by vegorov@chromium.org

Merge flush code phase into marking phase.

Review URL: http://codereview.chromium.org/3135026

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5284 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 1c1f4161
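In outline: code flushing previously ran as a separate MC_FLUSH_CODE phase that rescanned the heap and every stack for each candidate function. After this change the marking visitor makes the flushing decision itself, using mark bits (code reachable from stacks or the compilation cache is marked up front in PrepareForCodeFlushing) plus a new per-function code_age counter that must reach kCodeAgeThreshold before code is replaced by the lazy-compile stub. A toy, self-contained sketch of that aging policy (illustrative types and names, not code from this patch):

// Toy model of the aging decision introduced by this commit.
#include <cstdio>

struct ToySharedInfo {
  int code_age = 0;          // Full GCs survived with compiled code.
  bool code_marked = false;  // Would be a mark bit on the Code object in V8.
  bool compiled = true;
};

const int kCodeAgeThreshold = 5;  // Same constant as in the patch.

// Returns true when the function's code should be flushed this GC cycle.
bool ShouldFlush(ToySharedInfo* info) {
  if (info->code_marked) {  // Live code: reset the age, keep the code.
    info->code_age = 0;
    return false;
  }
  if (!info->compiled) return false;  // Nothing to flush.
  if (info->code_age < kCodeAgeThreshold) {  // Too young: just age it.
    info->code_age++;
    return false;
  }
  return true;  // Unreferenced long enough: replace with the lazy stub.
}

int main() {
  ToySharedInfo f;
  for (int gc = 1; gc <= 7; gc++)
    printf("GC %d: flush=%d age=%d\n", gc, ShouldFlush(&f), f.code_age);
  return 0;
}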
......@@ -79,10 +79,9 @@ class CompilationSubCache {
// young generation.
void Age();
-bool HasFunction(SharedFunctionInfo* function_info);
// GC support.
void Iterate(ObjectVisitor* v);
+void IterateFunctions(ObjectVisitor* v);
// Clear this sub-cache evicting all its content.
void Clear();
......@@ -206,27 +205,6 @@ Handle<CompilationCacheTable> CompilationSubCache::GetTable(int generation) {
}
-bool CompilationSubCache::HasFunction(SharedFunctionInfo* function_info) {
-if (function_info->script()->IsUndefined() ||
-Script::cast(function_info->script())->source()->IsUndefined()) {
-return false;
-}
-String* source =
-String::cast(Script::cast(function_info->script())->source());
-// Check all generations.
-for (int generation = 0; generation < generations(); generation++) {
-if (tables_[generation]->IsUndefined()) continue;
-CompilationCacheTable* table =
-CompilationCacheTable::cast(tables_[generation]);
-Object* object = table->Lookup(source);
-if (object->IsSharedFunctionInfo()) return true;
-}
-return false;
-}
void CompilationSubCache::Age() {
// Age the generations implicitly killing off the oldest.
for (int i = generations_ - 1; i > 0; i--) {
......@@ -238,6 +216,16 @@ void CompilationSubCache::Age() {
}
+void CompilationSubCache::IterateFunctions(ObjectVisitor* v) {
+Object* undefined = Heap::raw_unchecked_undefined_value();
+for (int i = 0; i < generations_; i++) {
+if (tables_[i] != undefined) {
+reinterpret_cast<CompilationCacheTable*>(tables_[i])->IterateElements(v);
+}
+}
+}
void CompilationSubCache::Iterate(ObjectVisitor* v) {
v->VisitPointers(&tables_[0], &tables_[generations_]);
}
......@@ -528,15 +516,16 @@ void CompilationCache::Clear() {
}
}
-bool CompilationCache::HasFunction(SharedFunctionInfo* function_info) {
-return script.HasFunction(function_info);
+void CompilationCache::Iterate(ObjectVisitor* v) {
+for (int i = 0; i < kSubCacheCount; i++) {
+subcaches[i]->Iterate(v);
+}
}
-void CompilationCache::Iterate(ObjectVisitor* v) {
+void CompilationCache::IterateFunctions(ObjectVisitor* v) {
for (int i = 0; i < kSubCacheCount; i++) {
-subcaches[i]->Iterate(v);
+subcaches[i]->IterateFunctions(v);
}
}
......
......@@ -79,11 +79,9 @@ class CompilationCache {
// Clear the cache - also used to initialize the cache at startup.
static void Clear();
-static bool HasFunction(SharedFunctionInfo* function_info);
// GC support.
static void Iterate(ObjectVisitor* v);
+static void IterateFunctions(ObjectVisitor* v);
// Notify the cache that a mark-sweep garbage collection is about to
// take place. This is used to retire entries from the cache to
......
......@@ -454,6 +454,7 @@ bool Compiler::CompileLazy(CompilationInfo* info) {
// Check the function has compiled code.
ASSERT(shared->is_compiled());
+shared->set_code_age(0);
return true;
}
......
......@@ -243,7 +243,8 @@ class Context: public FixedArray {
GlobalObject* global() {
Object* result = get(GLOBAL_INDEX);
-ASSERT(IsBootstrappingOrGlobalObject(result));
+ASSERT(Heap::gc_state() != Heap::NOT_IN_GC ||
+IsBootstrappingOrGlobalObject(result));
return reinterpret_cast<GlobalObject*>(result);
}
void set_global(GlobalObject* global) { set(GLOBAL_INDEX, global); }
......
......@@ -372,8 +372,8 @@ StackFrame::Type StackFrame::GetCallerState(State* state) const {
}
-Code* EntryFrame::code() const {
-return Heap::js_entry_code();
+Code* EntryFrame::unchecked_code() const {
+return Heap::raw_unchecked_js_entry_code();
}
......@@ -395,8 +395,8 @@ StackFrame::Type EntryFrame::GetCallerState(State* state) const {
}
-Code* EntryConstructFrame::code() const {
-return Heap::js_construct_entry_code();
+Code* EntryConstructFrame::unchecked_code() const {
+return Heap::raw_unchecked_js_construct_entry_code();
}
......@@ -406,8 +406,8 @@ Object*& ExitFrame::code_slot() const {
}
-Code* ExitFrame::code() const {
-return Code::cast(code_slot());
+Code* ExitFrame::unchecked_code() const {
+return reinterpret_cast<Code*>(code_slot());
}
......@@ -493,22 +493,22 @@ bool JavaScriptFrame::IsConstructor() const {
}
-Code* JavaScriptFrame::code() const {
+Code* JavaScriptFrame::unchecked_code() const {
JSFunction* function = JSFunction::cast(this->function());
-return function->shared()->code();
+return function->unchecked_code();
}
-Code* ArgumentsAdaptorFrame::code() const {
+Code* ArgumentsAdaptorFrame::unchecked_code() const {
return Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline);
}
-Code* InternalFrame::code() const {
+Code* InternalFrame::unchecked_code() const {
const int offset = InternalFrameConstants::kCodeOffset;
Object* code = Memory::Object_at(fp() + offset);
ASSERT(code != NULL);
-return Code::cast(code);
+return reinterpret_cast<Code*>(code);
}
......
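The frames.cc changes above split every frame's code accessor in two: Code::cast() verifies the object's type through its map, which is unreliable while mark bits are set in map words during marking, so each frame now exposes a raw unchecked_code() and layers the checked code() on top of it. A minimal sketch of the pattern with toy stand-in types (not V8's actual classes):

// Toy model of the checked/unchecked accessor split.
#include <cassert>

struct Object {
  bool is_code;  // Stand-in for a real map-based type check.
};

struct Code : Object {
  static Code* cast(Object* obj) {
    assert(obj->is_code);  // Only safe outside the marking phase.
    return static_cast<Code*>(obj);
  }
};

struct Frame {
  Object* code_slot;
  // Raw read: usable even while GC bookkeeping hides type information.
  Code* unchecked_code() const {
    return reinterpret_cast<Code*>(code_slot);
  }
  // Checked read for normal use, built on the unchecked one.
  Code* code() const { return Code::cast(unchecked_code()); }
};

int main() {
  Code code_obj;
  code_obj.is_code = true;
  Frame frame;
  frame.code_slot = &code_obj;
  return frame.code() == frame.unchecked_code() ? 0 : 1;
}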
......@@ -158,7 +158,13 @@ class StackFrame BASE_EMBEDDED {
virtual Type type() const = 0;
-// Get the code associated with this frame.
-virtual Code* code() const = 0;
+// This method can be called during the marking phase of GC.
+virtual Code* unchecked_code() const = 0;
+// Get the code associated with this frame.
+inline Code* code() const {
+return Code::cast(unchecked_code());
+}
// Garbage collection support.
static void CookFramesForThread(ThreadLocalTop* thread);
......@@ -224,7 +230,7 @@ class EntryFrame: public StackFrame {
public:
virtual Type type() const { return ENTRY; }
-virtual Code* code() const;
+virtual Code* unchecked_code() const;
// Garbage collection support.
virtual void Iterate(ObjectVisitor* v) const;
......@@ -255,7 +261,7 @@ class EntryConstructFrame: public EntryFrame {
public:
virtual Type type() const { return ENTRY_CONSTRUCT; }
-virtual Code* code() const;
+virtual Code* unchecked_code() const;
static EntryConstructFrame* cast(StackFrame* frame) {
ASSERT(frame->is_entry_construct());
......@@ -277,7 +283,7 @@ class ExitFrame: public StackFrame {
enum Mode { MODE_NORMAL, MODE_DEBUG };
virtual Type type() const { return EXIT; }
-virtual Code* code() const;
+virtual Code* unchecked_code() const;
Object*& code_slot() const;
......@@ -403,7 +409,7 @@ class JavaScriptFrame: public StandardFrame {
int index) const;
// Determine the code for the frame.
-virtual Code* code() const;
+virtual Code* unchecked_code() const;
static JavaScriptFrame* cast(StackFrame* frame) {
ASSERT(frame->is_java_script());
......@@ -439,7 +445,7 @@ class ArgumentsAdaptorFrame: public JavaScriptFrame {
virtual Type type() const { return ARGUMENTS_ADAPTOR; }
// Determine the code for the frame.
-virtual Code* code() const;
+virtual Code* unchecked_code() const;
static ArgumentsAdaptorFrame* cast(StackFrame* frame) {
ASSERT(frame->is_arguments_adaptor());
......@@ -469,7 +475,7 @@ class InternalFrame: public StandardFrame {
virtual void Iterate(ObjectVisitor* v) const;
// Determine the code for the frame.
-virtual Code* code() const;
+virtual Code* unchecked_code() const;
static InternalFrame* cast(StackFrame* frame) {
ASSERT(frame->is_internal());
......
......@@ -773,6 +773,7 @@ bool CompileLazy(Handle<JSFunction> function,
ClearExceptionFlag flag) {
if (function->shared()->is_compiled()) {
function->set_code(function->shared()->code());
+function->shared()->set_code_age(0);
return true;
} else {
CompilationInfo info(function, 0, receiver);
......@@ -788,6 +789,7 @@ bool CompileLazyInLoop(Handle<JSFunction> function,
ClearExceptionFlag flag) {
if (function->shared()->is_compiled()) {
function->set_code(function->shared()->code());
+function->shared()->set_code_age(0);
return true;
} else {
CompilationInfo info(function, 1, receiver);
......
......@@ -637,12 +637,6 @@ void Heap::PerformGarbageCollection(AllocationSpace space,
int start_new_space_size = Heap::new_space()->Size();
if (collector == MARK_COMPACTOR) {
-if (FLAG_flush_code) {
-// Flush all potentially unused code.
-GCTracer::Scope gc_scope(tracer, GCTracer::Scope::MC_FLUSH_CODE);
-FlushCode();
-}
// Perform mark-sweep with optional compaction.
MarkCompact(tracer);
......@@ -1100,6 +1094,10 @@ class ScavengingVisitor : public StaticVisitorBase {
&ObjectEvacuationStrategy<POINTER_OBJECT>::
VisitSpecialized<SharedFunctionInfo::kSize>);
+table_.Register(kVisitJSFunction,
+&ObjectEvacuationStrategy<POINTER_OBJECT>::
+VisitSpecialized<JSFunction::kSize>);
table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
kVisitDataObject,
kVisitDataObjectGeneric>();
......@@ -2359,109 +2357,6 @@ Object* Heap::AllocateExternalArray(int length,
}
-// The StackVisitor is used to traverse all the archived threads to see if
-// there are activations on any of the stacks corresponding to the code.
-class FlushingStackVisitor : public ThreadVisitor {
-public:
-explicit FlushingStackVisitor(Code* code) : found_(false), code_(code) {}
-void VisitThread(ThreadLocalTop* top) {
-// If we already found the code in a previous traversed thread we return.
-if (found_) return;
-for (StackFrameIterator it(top); !it.done(); it.Advance()) {
-if (code_->contains(it.frame()->pc())) {
-found_ = true;
-return;
-}
-}
-}
-bool FoundCode() {return found_;}
-private:
-bool found_;
-Code* code_;
-};
-static bool CodeIsActive(Code* code) {
-// Make sure we are not referencing the code from the stack.
-for (StackFrameIterator it; !it.done(); it.Advance()) {
-if (code->contains(it.frame()->pc())) return true;
-}
-// Iterate the archived stacks in all threads to check if
-// the code is referenced.
-FlushingStackVisitor threadvisitor(code);
-ThreadManager::IterateArchivedThreads(&threadvisitor);
-if (threadvisitor.FoundCode()) return true;
-return false;
-}
-static void FlushCodeForFunction(JSFunction* function) {
-SharedFunctionInfo* shared_info = function->shared();
-// Special handling if the function and shared info objects
-// have different code objects.
-if (function->code() != shared_info->code()) {
-// If the shared function has been flushed but the function has not,
-// we flush the function if possible.
-if (!shared_info->is_compiled() && function->is_compiled() &&
-!CodeIsActive(function->code())) {
-function->set_code(shared_info->code());
-}
-return;
-}
-// The function must be compiled and have the source code available,
-// to be able to recompile it in case we need the function again.
-if (!(shared_info->is_compiled() && shared_info->HasSourceCode())) return;
-// We never flush code for Api functions.
-if (shared_info->IsApiFunction()) return;
-// Only flush code for functions.
-if (!shared_info->code()->kind() == Code::FUNCTION) return;
-// Function must be lazy compilable.
-if (!shared_info->allows_lazy_compilation()) return;
-// If this is a full script wrapped in a function we do no flush the code.
-if (shared_info->is_toplevel()) return;
-// If this function is in the compilation cache we do not flush the code.
-if (CompilationCache::HasFunction(shared_info)) return;
-// Check stack and archived threads for the code.
-if (CodeIsActive(shared_info->code())) return;
-// Compute the lazy compilable version of the code.
-Code* code = Builtins::builtin(Builtins::LazyCompile);
-shared_info->set_code(code);
-function->set_code(code);
-}
-void Heap::FlushCode() {
-#ifdef ENABLE_DEBUGGER_SUPPORT
-// Do not flush code if the debugger is loaded or there are breakpoints.
-if (Debug::IsLoaded() || Debug::has_break_points()) return;
-#endif
-HeapObjectIterator it(old_pointer_space());
-for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
-if (obj->IsJSFunction()) {
-JSFunction* function = JSFunction::cast(obj);
-// The function must have a valid context and not be a builtin.
-if (function->unchecked_context()->IsContext() &&
-!function->IsBuiltin()) {
-FlushCodeForFunction(function);
-}
-}
-}
-}
Object* Heap::CreateCode(const CodeDesc& desc,
Code::Flags flags,
Handle<Object> self_reference) {
......@@ -4822,7 +4717,6 @@ GCTracer::~GCTracer() {
PrintF("sweep=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP]));
PrintF("sweepns=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP_NEWSPACE]));
PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));
PrintF("flushcode=%d ", static_cast<int>(scopes_[Scope::MC_FLUSH_CODE]));
PrintF("total_size_before=%d ", start_size_);
PrintF("total_size_after=%d ", Heap::SizeOfObjects());
......
......@@ -1258,10 +1258,6 @@ class Heap : public AllStatic {
// Flush the number to string cache.
static void FlushNumberStringCache();
-// Flush code from functions we do not expect to use again. The code will
-// be replaced with a lazy compilable version.
-static void FlushCode();
static void UpdateSurvivalRateTrend(int start_new_space_size);
enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };
......
......@@ -27,6 +27,7 @@
#include "v8.h"
#include "compilation-cache.h"
#include "execution.h"
#include "heap-profiler.h"
#include "global-handles.h"
......@@ -252,6 +253,15 @@ class StaticMarkingVisitor : public StaticVisitorBase {
table_.GetVisitor(map)(map, obj);
}
+static void EnableCodeFlushing(bool enabled) {
+if (enabled) {
+table_.Register(kVisitJSFunction, &VisitJSFunction);
+} else {
+table_.Register(kVisitJSFunction,
+&JSObjectVisitor::VisitSpecialized<JSFunction::kSize>);
+}
+}
static void Initialize() {
table_.Register(kVisitShortcutCandidate,
&FixedBodyVisitor<StaticMarkingVisitor,
......@@ -289,6 +299,8 @@ class StaticMarkingVisitor : public StaticVisitorBase {
table_.Register(kVisitCode, &VisitCode);
+table_.Register(kVisitJSFunction, &VisitJSFunction);
table_.Register(kVisitPropertyCell,
&FixedBodyVisitor<StaticMarkingVisitor,
JSGlobalPropertyCell::BodyDescriptor,
......@@ -405,6 +417,134 @@ class StaticMarkingVisitor : public StaticVisitorBase {
reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>();
}
+// Code flushing support.
+// How many collections a newly compiled code object will survive before
+// being flushed.
+static const int kCodeAgeThreshold = 5;
+inline static bool HasSourceCode(SharedFunctionInfo* info) {
+Object* undefined = Heap::raw_unchecked_undefined_value();
+return (info->script() != undefined) &&
+(reinterpret_cast<Script*>(info->script())->source() != undefined);
+}
+inline static bool IsCompiled(JSFunction* function) {
+return
+function->unchecked_code() != Builtins::builtin(Builtins::LazyCompile);
+}
+inline static bool IsCompiled(SharedFunctionInfo* function) {
+return
+function->unchecked_code() != Builtins::builtin(Builtins::LazyCompile);
+}
+static void FlushCodeForFunction(JSFunction* function) {
+SharedFunctionInfo* shared_info = function->unchecked_shared();
+if (shared_info->IsMarked()) return;
+// Special handling if the function and shared info objects
+// have different code objects.
+if (function->unchecked_code() != shared_info->unchecked_code()) {
+// If the shared function has been flushed but the function has not,
+// we flush the function if possible.
+if (!IsCompiled(shared_info) &&
+IsCompiled(function) &&
+!function->unchecked_code()->IsMarked()) {
+function->set_code(shared_info->unchecked_code());
+}
+return;
+}
+// Code is either on the stack or in the compilation cache.
+if (shared_info->unchecked_code()->IsMarked()) {
+shared_info->set_code_age(0);
+return;
+}
+// The function must be compiled and have the source code available,
+// to be able to recompile it in case we need the function again.
+if (!(shared_info->is_compiled() && HasSourceCode(shared_info))) return;
+// We never flush code for Api functions.
+Object* function_data = shared_info->function_data();
+if (function_data->IsHeapObject() &&
+(SafeMap(function_data)->instance_type() ==
+FUNCTION_TEMPLATE_INFO_TYPE)) {
+return;
+}
+// Only flush code for functions.
+if (shared_info->code()->kind() != Code::FUNCTION) return;
+// Function must be lazy compilable.
+if (!shared_info->allows_lazy_compilation()) return;
+// If this is a full script wrapped in a function we do not flush the code.
+if (shared_info->is_toplevel()) return;
+// Age this shared function info.
+if (shared_info->code_age() < kCodeAgeThreshold) {
+shared_info->set_code_age(shared_info->code_age() + 1);
+return;
+}
+// Compute the lazy compilable version of the code.
+Code* code = Builtins::builtin(Builtins::LazyCompile);
+shared_info->set_code(code);
+function->set_code(code);
+}
+static inline Map* SafeMap(Object* obj) {
+MapWord map_word = HeapObject::cast(obj)->map_word();
+map_word.ClearMark();
+map_word.ClearOverflow();
+return map_word.ToMap();
+}
+static inline bool IsJSBuiltinsObject(Object* obj) {
+return obj->IsHeapObject() &&
+(SafeMap(obj)->instance_type() == JS_BUILTINS_OBJECT_TYPE);
+}
+static inline bool IsValidNotBuiltinContext(Object* ctx) {
+if (!ctx->IsHeapObject()) return false;
+Map* map = SafeMap(ctx);
+if (!(map == Heap::raw_unchecked_context_map() ||
+map == Heap::raw_unchecked_catch_context_map() ||
+map == Heap::raw_unchecked_global_context_map())) {
+return false;
+}
+Context* context = reinterpret_cast<Context*>(ctx);
+if (IsJSBuiltinsObject(context->global())) {
+return false;
+}
+return true;
+}
+static void VisitJSFunction(Map* map, HeapObject* object) {
+JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object);
+// The function must have a valid context and not be a builtin.
+if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) {
+FlushCodeForFunction(jsfunction);
+}
+JSObjectVisitor::VisitSpecialized<JSFunction::kSize>(map, object);
+}
typedef void (*Callback)(Map* map, HeapObject* object);
static VisitorDispatchTable<Callback> table_;
......@@ -435,6 +575,62 @@ class MarkingVisitor : public ObjectVisitor {
};
+class CodeMarkingVisitor : public ThreadVisitor {
+public:
+void VisitThread(ThreadLocalTop* top) {
+for (StackFrameIterator it(top); !it.done(); it.Advance()) {
+MarkCompactCollector::MarkObject(it.frame()->unchecked_code());
+}
+}
+};
+class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
+public:
+void VisitPointers(Object** start, Object** end) {
+for (Object** p = start; p < end; p++) VisitPointer(p);
+}
+void VisitPointer(Object** slot) {
+Object* obj = *slot;
+if (obj->IsHeapObject()) {
+MarkCompactCollector::MarkObject(HeapObject::cast(obj));
+}
+}
+};
+void MarkCompactCollector::PrepareForCodeFlushing() {
+if (!FLAG_flush_code) {
+StaticMarkingVisitor::EnableCodeFlushing(false);
+return;
+}
+#ifdef ENABLE_DEBUGGER_SUPPORT
+if (Debug::IsLoaded() || Debug::has_break_points()) {
+StaticMarkingVisitor::EnableCodeFlushing(false);
+return;
+}
+#endif
+StaticMarkingVisitor::EnableCodeFlushing(true);
+// Make sure we are not referencing the code from the stack.
+for (StackFrameIterator it; !it.done(); it.Advance()) {
+MarkCompactCollector::MarkObject(it.frame()->unchecked_code());
+}
+// Iterate the archived stacks in all threads to check if
+// the code is referenced.
+CodeMarkingVisitor code_marking_visitor;
+ThreadManager::IterateArchivedThreads(&code_marking_visitor);
+SharedFunctionInfoMarkingVisitor visitor;
+CompilationCache::IterateFunctions(&visitor);
+MarkCompactCollector::ProcessMarkingStack();
+}
// Visitor class for marking heap roots.
class RootMarkingVisitor : public ObjectVisitor {
public:
......@@ -793,6 +989,8 @@ void MarkCompactCollector::MarkLiveObjects() {
ASSERT(!marking_stack.overflowed());
+PrepareForCodeFlushing();
RootMarkingVisitor root_visitor;
MarkRoots(&root_visitor);
......
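During marking, V8 stores the mark and overflow bits inside an object's map word, which is why the visitor above reads maps through SafeMap() and frames through unchecked_code() instead of the checked accessors. A minimal standalone sketch of the idea; the bit positions here are assumptions for illustration, not V8's actual encoding:

// Toy model of stripping GC bookkeeping bits from a map word.
#include <cstdint>
#include <cstdio>

const uintptr_t kMarkBit = 1u << 0;      // Assumed bit positions.
const uintptr_t kOverflowBit = 1u << 1;

uintptr_t SafeMapWord(uintptr_t map_word) {
  // Clear the mark and overflow bits so the word is a usable pointer again.
  return map_word & ~(kMarkBit | kOverflowBit);
}

int main() {
  uintptr_t map = 0x7f30a0;  // Pretend aligned Map* address.
  uintptr_t marked = map | kMarkBit | kOverflowBit;
  printf("marked=%#lx clean=%#lx\n",
         (unsigned long)marked, (unsigned long)SafeMapWord(marked));
  return 0;
}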
......@@ -175,6 +175,10 @@ class MarkCompactCollector: public AllStatic {
friend class RootMarkingVisitor;
friend class MarkingVisitor;
friend class StaticMarkingVisitor;
+friend class CodeMarkingVisitor;
+friend class SharedFunctionInfoMarkingVisitor;
+static void PrepareForCodeFlushing();
// Marking operations for objects reachable from roots.
static void MarkLiveObjects();
......
......@@ -2563,6 +2563,7 @@ BOOL_ACCESSORS(SharedFunctionInfo,
allows_lazy_compilation,
kAllowLazyCompilation)
#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
......@@ -2662,6 +2663,11 @@ Code* SharedFunctionInfo::code() {
}
+Code* SharedFunctionInfo::unchecked_code() {
+return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
+}
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
WRITE_FIELD(this, kCodeOffset, value);
CONDITIONAL_WRITE_BARRIER(this, kCodeOffset, mode);
......@@ -2708,6 +2714,17 @@ int SharedFunctionInfo::custom_call_generator_id() {
}
+int SharedFunctionInfo::code_age() {
+return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
+}
+void SharedFunctionInfo::set_code_age(int code_age) {
+// Clear the old age bits before storing the new value; OR-ing alone would
+// never allow the age to be reset back to zero.
+set_compiler_hints((compiler_hints() & ~(kCodeAgeMask << kCodeAgeShift)) |
+((code_age & kCodeAgeMask) << kCodeAgeShift));
+}
bool JSFunction::IsBuiltin() {
return context()->global()->IsJSBuiltinsObject();
}
......@@ -2718,6 +2735,11 @@ Code* JSFunction::code() {
}
+Code* JSFunction::unchecked_code() {
+return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
+}
void JSFunction::set_code(Code* value) {
// Skip the write barrier because code is never in new space.
ASSERT(!Heap::InNewSpace(value));
......@@ -2735,6 +2757,12 @@ Object* JSFunction::unchecked_context() {
}
+SharedFunctionInfo* JSFunction::unchecked_shared() {
+return reinterpret_cast<SharedFunctionInfo*>(
+READ_FIELD(this, kSharedFunctionInfoOffset));
+}
void JSFunction::set_context(Object* value) {
ASSERT(value == Heap::undefined_value() || value->IsContext());
WRITE_FIELD(this, kContextOffset, value);
......
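The code age is packed into three bits of the existing compiler_hints field (kCodeAgeShift = 3, kCodeAgeMask = 7, declared in objects.h below), so reading and writing it is a shift-and-mask read-modify-write. A self-contained sketch of that arithmetic, assuming only the shift and mask values from this patch:

// Toy model of the compiler_hints bit field holding code_age.
#include <cstdio>

const int kCodeAgeShift = 3;
const int kCodeAgeMask = 7;  // Three bits: ages 0 through 7.

int code_age(int hints) { return (hints >> kCodeAgeShift) & kCodeAgeMask; }

int with_code_age(int hints, int age) {
  // Clear the old age bits first; OR-ing alone could never reset to zero.
  return (hints & ~(kCodeAgeMask << kCodeAgeShift)) |
         ((age & kCodeAgeMask) << kCodeAgeShift);
}

int main() {
  int hints = 1;  // An unrelated flag bit that must stay untouched.
  hints = with_code_age(hints, 5);
  printf("age=%d hints=%d\n", code_age(hints), hints);  // age=5
  hints = with_code_age(hints, 0);
  printf("age=%d hints=%d\n", code_age(hints), hints);  // age=0, flag kept
  return 0;
}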
......@@ -101,7 +101,6 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
case JS_VALUE_TYPE:
case JS_ARRAY_TYPE:
case JS_REGEXP_TYPE:
-case JS_FUNCTION_TYPE:
case JS_GLOBAL_PROXY_TYPE:
case JS_GLOBAL_OBJECT_TYPE:
case JS_BUILTINS_OBJECT_TYPE:
......@@ -109,6 +108,9 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
kVisitJSObjectGeneric,
instance_size);
+case JS_FUNCTION_TYPE:
+return kVisitJSFunction;
case HEAP_NUMBER_TYPE:
case PIXEL_ARRAY_TYPE:
case EXTERNAL_BYTE_ARRAY_TYPE:
......
......@@ -100,6 +100,7 @@ class StaticVisitorBase : public AllStatic {
kVisitMap,
kVisitPropertyCell,
kVisitSharedFunctionInfo,
+kVisitJSFunction,
kVisitorIdCount,
kMinObjectSizeInWords = 2
......@@ -204,6 +205,7 @@ class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
template<int object_size>
static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
ASSERT(BodyDescriptor::SizeOf(map, object) == object_size);
IteratePointers(object, BodyDescriptor::kStartOffset, object_size);
return static_cast<ReturnType>(object_size);
}
......@@ -268,6 +270,10 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);
+table_.Register(kVisitJSFunction,
+&JSObjectVisitor::
+template VisitSpecialized<JSFunction::kSize>);
table_.RegisterSpecializations<DataObjectVisitor,
kVisitDataObject,
kVisitDataObjectGeneric>();
......@@ -275,8 +281,8 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
kVisitJSObject,
kVisitJSObjectGeneric>();
table_.RegisterSpecializations<StructVisitor,
kVisitStruct,
kVisitStructGeneric>();
}
static inline int IterateBody(Map* map, HeapObject* obj) {
......
......@@ -5099,7 +5099,7 @@ Object* Oddball::Initialize(const char* to_string, Object* to_number) {
bool SharedFunctionInfo::HasSourceCode() {
return !script()->IsUndefined() &&
-!Script::cast(script())->source()->IsUndefined();
+!reinterpret_cast<Script*>(script())->source()->IsUndefined();
}
......
......@@ -3371,6 +3371,8 @@ class SharedFunctionInfo: public HeapObject {
// [construct stub]: Code stub for constructing instances of this function.
DECL_ACCESSORS(construct_stub, Code)
+inline Code* unchecked_code();
// Returns whether this function has been compiled to native code yet.
inline bool is_compiled();
......@@ -3478,6 +3480,15 @@ class SharedFunctionInfo: public HeapObject {
inline bool allows_lazy_compilation();
inline void set_allows_lazy_compilation(bool flag);
+// Indicates how many full GCs this function has survived with an assigned
+// code object. Used to determine when it is relatively safe to flush this
+// code object and replace it with the lazy compilation stub. The age is
+// reset when the GC notices that the code object is referenced from the
+// stack or the compilation cache.
+inline int code_age();
+inline void set_code_age(int age);
// Check whether an inlined constructor can be generated with the given
// prototype.
bool CanGenerateInlineConstructor(Object* prototype);
......@@ -3608,6 +3619,8 @@ class SharedFunctionInfo: public HeapObject {
static const int kHasOnlySimpleThisPropertyAssignments = 0;
static const int kTryFullCodegen = 1;
static const int kAllowLazyCompilation = 2;
+static const int kCodeAgeShift = 3;
+static const int kCodeAgeMask = 7;
DISALLOW_IMPLICIT_CONSTRUCTORS(SharedFunctionInfo);
};
......@@ -3623,6 +3636,8 @@ class JSFunction: public JSObject {
// can be shared by instances.
DECL_ACCESSORS(shared, SharedFunctionInfo)
+inline SharedFunctionInfo* unchecked_shared();
// [context]: The context for this function.
inline Context* context();
inline Object* unchecked_context();
......@@ -3635,6 +3650,8 @@ class JSFunction: public JSObject {
inline Code* code();
inline void set_code(Code* value);
+inline Code* unchecked_code();
// Tells whether this function is builtin.
inline bool IsBuiltin();
......
......@@ -210,7 +210,6 @@ class StubCache : public AllStatic {
static Object* ComputeCallDebugPrepareStepIn(int argc, Code::Kind kind);
#endif
// Update cache for entry hash(name, map).
static Code* Set(String* name, Map* map, Code* code);
......
......@@ -973,9 +973,9 @@ TEST(TestCodeFlushing) {
Heap::CollectAllGarbage(true);
Heap::CollectAllGarbage(true);
// foo should still be in the compilation cache and therefore not
// have been removed.
CHECK(function->shared()->is_compiled());
Heap::CollectAllGarbage(true);
Heap::CollectAllGarbage(true);
Heap::CollectAllGarbage(true);
Heap::CollectAllGarbage(true);
......@@ -983,7 +983,9 @@ TEST(TestCodeFlushing) {
// foo should no longer be in the compilation cache
CHECK(!function->shared()->is_compiled());
+CHECK(!function->is_compiled());
// Call foo to get it recompiled.
CompileRun("foo()");
CHECK(function->shared()->is_compiled());
+CHECK(function->is_compiled());
}
......@@ -216,7 +216,7 @@ def reclaimed_bytes(row):
return row['total_size_before'] - row['total_size_after']
def other_scope(r):
-return r['pause'] - r['mark'] - r['sweep'] - r['compact'] - r['flushcode']
+return r['pause'] - r['mark'] - r['sweep'] - r['compact']
plots = [
[
......@@ -226,7 +226,6 @@ plots = [
Plot(Item('Marking', 'mark', lc = 'purple'),
Item('Sweep', 'sweep', lc = 'blue'),
Item('Compaction', 'compact', lc = 'red'),
-Item('Flush Code', 'flushcode', lc = 'yellow'),
Item('Other', other_scope, lc = 'grey'))
],
[
......@@ -288,7 +287,10 @@ def process_trace(filename):
n = len(trace)
total = calc_total(trace, field)
max = calc_max(trace, field)
-avg = total / n
+if n > 0:
+avg = total / n
+else:
+avg = 0
if n > 1:
dev = math.sqrt(freduce(lambda t,r: (r - avg) ** 2, field, trace, 0) /
(n - 1))
......@@ -303,14 +305,14 @@ def process_trace(filename):
with open(filename + '.html', 'w') as out:
out.write('<html><body>')
out.write('<table>')
-out.write('<tr><td>Phase</td><td>Count</td><td>Time (ms)</td><td>Max</td><td>Avg</td></tr>')
+out.write('<tr><td>Phase</td><td>Count</td><td>Time (ms)</td>')
+out.write('<td>Max</td><td>Avg</td></tr>')
stats(out, 'Total in GC', trace, 'pause')
stats(out, 'Scavenge', scavenges, 'pause')
stats(out, 'MarkSweep', marksweeps, 'pause')
stats(out, 'MarkCompact', markcompacts, 'pause')
stats(out, 'Mark', filter(lambda r: r['mark'] != 0, trace), 'mark')
stats(out, 'Sweep', filter(lambda r: r['sweep'] != 0, trace), 'sweep')
-stats(out, 'Flush Code', filter(lambda r: r['flushcode'] != 0, trace), 'flushcode')
stats(out, 'Compact', filter(lambda r: r['compact'] != 0, trace), 'compact')
out.write('</table>')
for chart in charts:
......