Commit 6e12a16a authored by ulan@chromium.org's avatar ulan@chromium.org

Use a filter instead of a visitor to deoptimize selected functions in a context.

This makes the DeoptimizeAll function O(n) instead of O(n^2), where n is the number of optimized functions.

Before this change, DeoptimizeAll iterated over the optimized function list and called DeoptimizingVisitor for each function. The visitor iterated over the optimized function list again to remove the functions that share the same optimized code.

This change partitions the optimized function list into one or more lists of related functions in one pass over the optimized function list.

R=mstarzinger@chromium.org

Review URL: https://chromiumcodereview.appspot.com/11547015

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@13226 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 133957e7
......@@ -44,11 +44,14 @@ int Deoptimizer::patch_size() {
}
void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
HandleScope scope;
void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
JSFunction* function) {
Isolate* isolate = function->GetIsolate();
HandleScope scope(isolate);
AssertNoAllocation no_allocation;
if (!function->IsOptimized()) return;
ASSERT(function->IsOptimized());
ASSERT(function->FunctionsInFunctionListShareSameCode());
// The optimized code is going to be patched, so we cannot use it
// any more. Play safe and reset the whole cache.
......@@ -91,8 +94,6 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
#endif
}
Isolate* isolate = code->GetIsolate();
// Add the deoptimizing code to the list.
DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
DeoptimizerData* data = isolate->deoptimizer_data();
......
......@@ -247,45 +247,6 @@ void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm,
}
class DeoptimizingVisitor : public OptimizedFunctionVisitor {
public:
virtual void EnterContext(Context* context) {
if (FLAG_trace_deopt) {
PrintF("[deoptimize context: %" V8PRIxPTR "]\n",
reinterpret_cast<intptr_t>(context));
}
}
virtual void VisitFunction(JSFunction* function) {
Deoptimizer::DeoptimizeFunction(function);
}
virtual void LeaveContext(Context* context) {
context->ClearOptimizedFunctions();
}
};
// Deoptimize every optimized function in every native context.
void Deoptimizer::DeoptimizeAll() {
  AssertNoAllocation no_allocation;  // No GC while walking function lists.

  if (FLAG_trace_deopt) PrintF("[deoptimize all contexts]\n");

  DeoptimizingVisitor visitor;
  VisitAllOptimizedFunctions(&visitor);
}
// Deoptimizes all optimized functions associated with the given global
// object (via its native context).
void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) {
  AssertNoAllocation no_allocation;  // No GC while walking function lists.
  DeoptimizingVisitor visitor;
  VisitAllOptimizedFunctionsForGlobalObject(object, &visitor);
}
void Deoptimizer::VisitAllOptimizedFunctionsForContext(
Context* context, OptimizedFunctionVisitor* visitor) {
Isolate* isolate = context->GetIsolate();
......@@ -315,36 +276,151 @@ void Deoptimizer::VisitAllOptimizedFunctionsForContext(
}
void Deoptimizer::VisitAllOptimizedFunctionsForGlobalObject(
JSObject* object, OptimizedFunctionVisitor* visitor) {
// Applies |visitor| to the optimized functions of every native context.
void Deoptimizer::VisitAllOptimizedFunctions(
    OptimizedFunctionVisitor* visitor) {
  AssertNoAllocation no_allocation;
  // Walk the singly-linked list of native contexts hanging off the heap.
  for (Object* current = Isolate::Current()->heap()->native_contexts_list();
       !current->IsUndefined();
       current = Context::cast(current)->get(Context::NEXT_CONTEXT_LINK)) {
    VisitAllOptimizedFunctionsForContext(Context::cast(current), visitor);
  }
}
// Removes the functions selected by the given filter from the optimized
// function list of the given context and partitions the removed functions
// into one or more lists such that all functions in a list share the same
// code. The head of each list is written in the deoptimizing_functions field
// of the corresponding code object.
// The found code objects are returned in the given zone list.
static void PartitionOptimizedFunctions(Context* context,
                                        OptimizedFunctionFilter* filter,
                                        ZoneList<Code*>* partitions,
                                        Zone* zone,
                                        Object* undefined) {
  AssertNoAllocation no_allocation;
  Object* current = context->get(Context::OPTIMIZED_FUNCTIONS_LIST);
  // Functions rejected by the filter are re-linked (in original order) into
  // this remainder list, which becomes the context's new list below.
  Object* remainder_head = undefined;
  Object* remainder_tail = undefined;
  ASSERT_EQ(0, partitions->length());
  while (current != undefined) {
    JSFunction* function = JSFunction::cast(current);
    // Advance first: the function's link field is overwritten below.
    current = function->next_function_link();
    if (filter->TakeFunction(function)) {
      Code* code = function->code();
      // Record each distinct code object exactly once. Its
      // deoptimizing_functions field doubles as the head of that
      // partition's list.
      if (code->deoptimizing_functions() == undefined) {
        partitions->Add(code, zone);
      } else {
        ASSERT(partitions->Contains(code));
      }
      // Push the function onto the partition list of its code object.
      function->set_next_function_link(code->deoptimizing_functions());
      code->set_deoptimizing_functions(function);
    } else {
      // Append the function to the remainder list.
      if (remainder_head == undefined) {
        remainder_head = function;
      } else {
        JSFunction::cast(remainder_tail)->set_next_function_link(function);
      }
      remainder_tail = function;
    }
  }
  // Terminate the remainder list and install it as the context's
  // optimized-function list.
  if (remainder_tail != undefined) {
    JSFunction::cast(remainder_tail)->set_next_function_link(undefined);
  }
  context->set(Context::OPTIMIZED_FUNCTIONS_LIST, remainder_head);
}
// Filter that unconditionally selects every optimized function.
class DeoptimizeAllFilter : public OptimizedFunctionFilter {
 public:
  virtual bool TakeFunction(JSFunction* fun) { return true; }
};
class DeoptimizeWithMatchingCodeFilter : public OptimizedFunctionFilter {
public:
explicit DeoptimizeWithMatchingCodeFilter(Code* code) : code_(code) {}
virtual bool TakeFunction(JSFunction* function) {
return function->code() == code_;
}
private:
Code* code_;
};
void Deoptimizer::DeoptimizeAll() {
AssertNoAllocation no_allocation;
if (FLAG_trace_deopt) {
PrintF("[deoptimize all contexts]\n");
}
DeoptimizeAllFilter filter;
DeoptimizeAllFunctionsWith(&filter);
}
void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) {
AssertNoAllocation no_allocation;
DeoptimizeAllFilter filter;
if (object->IsJSGlobalProxy()) {
Object* proto = object->GetPrototype();
ASSERT(proto->IsJSGlobalObject());
VisitAllOptimizedFunctionsForContext(
GlobalObject::cast(proto)->native_context(), visitor);
DeoptimizeAllFunctionsForContext(
GlobalObject::cast(proto)->native_context(), &filter);
} else if (object->IsGlobalObject()) {
VisitAllOptimizedFunctionsForContext(
GlobalObject::cast(object)->native_context(), visitor);
DeoptimizeAllFunctionsForContext(
GlobalObject::cast(object)->native_context(), &filter);
}
}
void Deoptimizer::VisitAllOptimizedFunctions(
OptimizedFunctionVisitor* visitor) {
// Deoptimizes the given function together with every other function in its
// native context that currently shares the same optimized code object.
void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
  if (!function->IsOptimized()) return;

  Context* native_context = function->context()->native_context();
  Isolate* isolate = native_context->GetIsolate();
  Object* undefined = isolate->heap()->undefined_value();
  Zone* zone = isolate->runtime_zone();
  ZoneScope zone_scope(zone, DELETE_ON_EXIT);

  // Pull out of the context's list every function sharing this code.
  DeoptimizeWithMatchingCodeFilter same_code_filter(function->code());
  ZoneList<Code*> partitions(1, zone);
  PartitionOptimizedFunctions(native_context, &same_code_filter, &partitions,
                              zone, undefined);
  // The function is optimized, so exactly one partition must be found.
  ASSERT_EQ(1, partitions.length());
  Code* code = partitions.at(0);
  DeoptimizeFunctionWithPreparedFunctionList(
      JSFunction::cast(code->deoptimizing_functions()));
  // Clear the temporarily stored list head.
  code->set_deoptimizing_functions(undefined);
}
// Deoptimizes, within a single native context, every optimized function
// accepted by |filter|.
void Deoptimizer::DeoptimizeAllFunctionsForContext(
    Context* context, OptimizedFunctionFilter* filter) {
  ASSERT(context->IsNativeContext());
  Isolate* isolate = context->GetIsolate();
  Object* undefined = isolate->heap()->undefined_value();
  Zone* zone = isolate->runtime_zone();
  ZoneScope zone_scope(zone, DELETE_ON_EXIT);

  // Group the selected functions by shared code object, then deoptimize one
  // group at a time.
  ZoneList<Code*> partitions(1, zone);
  PartitionOptimizedFunctions(context, filter, &partitions, zone, undefined);
  for (int i = 0; i < partitions.length(); ++i) {
    Code* code = partitions.at(i);
    DeoptimizeFunctionWithPreparedFunctionList(
        JSFunction::cast(code->deoptimizing_functions()));
    // Clear the temporarily stored list head.
    code->set_deoptimizing_functions(undefined);
  }
}
void Deoptimizer::DeoptimizeAllFunctionsWith(OptimizedFunctionFilter* filter) {
AssertNoAllocation no_allocation;
// Run through the list of all native contexts and deoptimize.
Object* context = Isolate::Current()->heap()->native_contexts_list();
while (!context->IsUndefined()) {
// GC can happen when the context is not fully initialized,
// so the global field of the context can be undefined.
Object* global = Context::cast(context)->get(Context::GLOBAL_OBJECT_INDEX);
if (!global->IsUndefined()) {
VisitAllOptimizedFunctionsForGlobalObject(JSObject::cast(global),
visitor);
}
DeoptimizeAllFunctionsForContext(Context::cast(context), filter);
context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
}
}
......@@ -1476,44 +1552,11 @@ void Deoptimizer::RemoveDeoptimizingCode(Code* code) {
}
// Unlinks from the context's optimized-function list every function whose
// code equals |code| and returns those functions as a new list, threaded
// through their next_function_link fields. The context's list keeps only
// the remaining functions.
static Object* CutOutRelatedFunctionsList(Context* context,
                                          Code* code,
                                          Object* undefined) {
  Object* result_list_head = undefined;
  Object* head;
  Object* current;
  current = head = context->get(Context::OPTIMIZED_FUNCTIONS_LIST);
  JSFunction* prev = NULL;  // Last function kept on the context's list.
  while (current != undefined) {
    JSFunction* func = JSFunction::cast(current);
    // Advance first: the function's link field may be rewritten below.
    current = func->next_function_link();
    if (func->code() == code) {
      // Push the function onto the result list...
      func->set_next_function_link(result_list_head);
      result_list_head = func;
      // ...and splice it out of the context's list.
      if (prev) {
        prev->set_next_function_link(current);
      } else {
        head = current;
      }
    } else {
      prev = func;
    }
  }
  // Write the head back only if it actually changed.
  if (head != context->get(Context::OPTIMIZED_FUNCTIONS_LIST)) {
    context->set(Context::OPTIMIZED_FUNCTIONS_LIST, head);
  }
  return result_list_head;
}
void Deoptimizer::ReplaceCodeForRelatedFunctions(JSFunction* function,
Code* code) {
Context* context = function->context()->native_context();
SharedFunctionInfo* shared = function->shared();
Object* undefined = Isolate::Current()->heap()->undefined_value();
Object* current = CutOutRelatedFunctionsList(context, code, undefined);
Object* current = function;
while (current != undefined) {
JSFunction* func = JSFunction::cast(current);
......
......@@ -87,6 +87,14 @@ class OptimizedFunctionVisitor BASE_EMBEDDED {
};
// Predicate interface used to select a subset of the optimized functions
// in a context (e.g. the ones to deoptimize).
class OptimizedFunctionFilter BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionFilter() {}

  // Returns true if |function| should be selected.
  virtual bool TakeFunction(JSFunction* function) = 0;
};
class Deoptimizer;
......@@ -177,12 +185,14 @@ class Deoptimizer : public Malloced {
static void DeoptimizeGlobalObject(JSObject* object);
static void DeoptimizeAllFunctionsWith(OptimizedFunctionFilter* filter);
static void DeoptimizeAllFunctionsForContext(
Context* context, OptimizedFunctionFilter* filter);
static void VisitAllOptimizedFunctionsForContext(
Context* context, OptimizedFunctionVisitor* visitor);
static void VisitAllOptimizedFunctionsForGlobalObject(
JSObject* object, OptimizedFunctionVisitor* visitor);
static void VisitAllOptimizedFunctions(OptimizedFunctionVisitor* visitor);
// The size in bytes of the code required at a lazy deopt patch site.
......@@ -353,6 +363,10 @@ class Deoptimizer : public Malloced {
static Code* FindDeoptimizingCodeFromAddress(Address addr);
static void RemoveDeoptimizingCode(Code* code);
// Deoptimize function assuming that function->next_function_link() points
// to a list that contains all functions that share the same optimized code.
static void DeoptimizeFunctionWithPreparedFunctionList(JSFunction* function);
// Fill the input from from a JavaScript frame. This is used when
// the debugger needs to inspect an optimized frame. For normal
// deoptimizations the input frame is filled in generated code.
......
......@@ -114,17 +114,19 @@ void Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code) {
}
void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
if (!function->IsOptimized()) return;
void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
JSFunction* function) {
Isolate* isolate = function->GetIsolate();
HandleScope scope(isolate);
AssertNoAllocation no_allocation;
ASSERT(function->IsOptimized());
ASSERT(function->FunctionsInFunctionListShareSameCode());
// The optimized code is going to be patched, so we cannot use it
// any more. Play safe and reset the whole cache.
function->shared()->ClearOptimizedCodeMap();
Isolate* isolate = function->GetIsolate();
HandleScope scope(isolate);
AssertNoAllocation no_allocation;
// Get the optimized code.
Code* code = function->code();
Address code_start_address = code->instruction_start();
......
......@@ -1223,23 +1223,15 @@ static bool IsInlined(JSFunction* function, SharedFunctionInfo* candidate) {
}
class DependentFunctionsDeoptimizingVisitor : public OptimizedFunctionVisitor {
class DependentFunctionFilter : public OptimizedFunctionFilter {
public:
explicit DependentFunctionsDeoptimizingVisitor(
explicit DependentFunctionFilter(
SharedFunctionInfo* function_info)
: function_info_(function_info) {}
virtual void EnterContext(Context* context) {
}
virtual void VisitFunction(JSFunction* function) {
if (function->shared() == function_info_ ||
IsInlined(function, function_info_)) {
Deoptimizer::DeoptimizeFunction(function);
}
}
virtual void LeaveContext(Context* context) {
virtual bool TakeFunction(JSFunction* function) {
return (function->shared() == function_info_ ||
IsInlined(function, function_info_));
}
private:
......@@ -1250,8 +1242,8 @@ class DependentFunctionsDeoptimizingVisitor : public OptimizedFunctionVisitor {
static void DeoptimizeDependentFunctions(SharedFunctionInfo* function_info) {
AssertNoAllocation no_allocation;
DependentFunctionsDeoptimizingVisitor visitor(function_info);
Deoptimizer::VisitAllOptimizedFunctions(&visitor);
DependentFunctionFilter filter(function_info);
Deoptimizer::DeoptimizeAllFunctionsWith(&filter);
}
......
......@@ -42,11 +42,14 @@ int Deoptimizer::patch_size() {
}
void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
HandleScope scope;
void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
JSFunction* function) {
Isolate* isolate = function->GetIsolate();
HandleScope scope(isolate);
AssertNoAllocation no_allocation;
if (!function->IsOptimized()) return;
ASSERT(function->IsOptimized());
ASSERT(function->FunctionsInFunctionListShareSameCode());
// The optimized code is going to be patched, so we cannot use it
// any more. Play safe and reset the whole cache.
......@@ -87,8 +90,6 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
#endif
}
Isolate* isolate = code->GetIsolate();
// Add the deoptimizing code to the list.
DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
DeoptimizerData* data = isolate->deoptimizer_data();
......
......@@ -4811,6 +4811,18 @@ void Code::set_stub_info(int value) {
}
// Stores the head of a list of deoptimizing functions in the slot normally
// holding the type feedback info. Only valid for optimized code; the slot
// is reused temporarily during deoptimization.
// NOTE(review): WRITE_FIELD skips the write barrier — presumably safe
// because callers run under AssertNoAllocation; confirm.
void Code::set_deoptimizing_functions(Object* value) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kTypeFeedbackInfoOffset, value);
}
// Reads back the list head stored by set_deoptimizing_functions().
// Only valid for optimized code.
Object* Code::deoptimizing_functions() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return Object::cast(READ_FIELD(this, kTypeFeedbackInfoOffset));
}
ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)
......
......@@ -4254,13 +4254,18 @@ class Code: public HeapObject {
// [deoptimization_data]: Array containing data for deopt.
DECL_ACCESSORS(deoptimization_data, FixedArray)
// [type_feedback_info]: Struct containing type feedback information.
// [type_feedback_info]: Struct containing type feedback information for
// unoptimized code. Optimized code can temporarily store the head of
// the list of the dependent optimized functions during deoptimization.
// STUBs can use this slot to store arbitrary information as a Smi.
// Will contain either a TypeFeedbackInfo object, or undefined, or a Smi.
// Will contain either a TypeFeedbackInfo object, or JSFunction object,
// or undefined, or a Smi.
DECL_ACCESSORS(type_feedback_info, Object)
inline void InitializeTypeFeedbackInfoNoWriteBarrier(Object* value);
inline int stub_info();
inline void set_stub_info(int info);
inline Object* deoptimizing_functions();
inline void set_deoptimizing_functions(Object* value);
// [gc_metadata]: Field used to hold GC related metadata. The contents of this
// field does not have to be traced during garbage collection since
......@@ -5125,8 +5130,7 @@ class Map: public HeapObject {
kConstructorOffset + kPointerSize;
static const int kDescriptorsOffset =
kTransitionsOrBackPointerOffset + kPointerSize;
static const int kCodeCacheOffset =
kDescriptorsOffset + kPointerSize;
static const int kCodeCacheOffset = kDescriptorsOffset + kPointerSize;
static const int kBitField3Offset = kCodeCacheOffset + kPointerSize;
static const int kSize = kBitField3Offset + kPointerSize;
......@@ -6148,6 +6152,18 @@ class JSFunction: public JSObject {
// Retrieve the native context from a function's literal array.
static Context* NativeContextFromLiterals(FixedArray* literals);
#ifdef DEBUG
bool FunctionsInFunctionListShareSameCode() {
Object* current = this;
while (!current->IsUndefined()) {
JSFunction* function = JSFunction::cast(current);
current = function->next_function_link();
if (function->code() != this->code()) return false;
}
return true;
}
#endif
// Layout descriptors. The last property (from kNonWeakFieldsEndOffset to
// kSize) is weak and has special handling during garbage collection.
static const int kCodeEntryOffset = JSObject::kHeaderSize;
......
......@@ -46,11 +46,14 @@ int Deoptimizer::patch_size() {
}
void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
HandleScope scope;
void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
JSFunction* function) {
Isolate* isolate = function->GetIsolate();
HandleScope scope(isolate);
AssertNoAllocation no_allocation;
if (!function->IsOptimized()) return;
ASSERT(function->IsOptimized());
ASSERT(function->FunctionsInFunctionListShareSameCode());
// The optimized code is going to be patched, so we cannot use it
// any more. Play safe and reset the whole cache.
......@@ -91,8 +94,6 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
#endif
}
Isolate* isolate = code->GetIsolate();
// Add the deoptimizing code to the list.
DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
DeoptimizerData* data = isolate->deoptimizer_data();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment