Commit 8402db12 authored by ricow@chromium.org

Change lazy compilation stub to a builtin.

This change turns the lazy compilation stub into a builtin and
eliminates the argc parameter (the argument count of the function for
which the lazy stub is created).

Review URL: http://codereview.chromium.org/3146008

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5244 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent fcfe6d74
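Taken together, the hunks below follow one mechanical pattern: every caller that used to request an argc-specialized lazy-compile stub from the stub cache now fetches the single shared builtin. A minimal before/after sketch of that pattern, adapted from the compiler.cc and runtime.cc hunks below (V8-internal API, illustrative variable names, not standalone code):

    // Before: one lazy-compile stub per argument count, built via the stub cache.
    Handle<Code> lazy_code_before = ComputeLazyCompile(literal->num_parameters());

    // After: a single LazyCompile builtin shared by all functions.
    Handle<Code> lazy_code_after(Builtins::builtin(Builtins::LazyCompile));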
@@ -911,6 +911,29 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
 }


+void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
+  // Enter an internal frame.
+  __ EnterInternalFrame();
+
+  // Preserve the function.
+  __ push(r1);
+
+  // Push the function on the stack as the argument to the runtime function.
+  __ push(r1);
+  __ CallRuntime(Runtime::kLazyCompile, 1);
+
+  // Calculate the entry point.
+  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
+
+  // Restore saved function.
+  __ pop(r1);
+
+  // Tear down temporary frame.
+  __ LeaveInternalFrame();
+
+  // Do a tail-call of the compiled function.
+  __ Jump(r2);
+}
+
+
 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
   // 1. Make sure we have at least one argument.
   // r0: actual number of arguments
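The only architecture-specific wrinkle in the ARM builtin above is the entry-point computation: the runtime call leaves the freshly compiled Code object (a tagged heap pointer) in r0, and the builtin jumps past the object's header to its first instruction. A self-contained sketch of that arithmetic (the constant values here are illustrative assumptions, not V8's actual values):

    #include <cstdint>

    constexpr uintptr_t kHeapObjectTag = 1;    // assumed low-bit tag on heap pointers
    constexpr uintptr_t kCodeHeaderSize = 32;  // hypothetical Code object header size

    // Mirrors "__ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag))":
    // untag the Code pointer and skip its header to reach the instructions.
    inline uintptr_t EntryPoint(uintptr_t tagged_code_pointer) {
      return tagged_code_pointer - kHeapObjectTag + kCodeHeaderSize;
    }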
@@ -1212,38 +1212,6 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
 }


-Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
-  // ----------- S t a t e -------------
-  //  -- r1: function
-  //  -- lr: return address
-  // -----------------------------------
-
-  // Enter an internal frame.
-  __ EnterInternalFrame();
-
-  // Preserve the function.
-  __ push(r1);
-
-  // Push the function on the stack as the argument to the runtime function.
-  __ push(r1);
-  __ CallRuntime(Runtime::kLazyCompile, 1);
-
-  // Calculate the entry point.
-  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
-
-  // Restore saved function.
-  __ pop(r1);
-
-  // Tear down temporary frame.
-  __ LeaveInternalFrame();
-
-  // Do a tail-call of the compiled function.
-  __ Jump(r2);
-
-  return GetCodeWithFlags(flags, "LazyCompileStub");
-}
-
-
 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
   if (kind_ == Code::KEYED_CALL_IC) {
     __ cmp(r2, Operand(Handle<String>(name)));
@@ -69,6 +69,7 @@ enum BuiltinExtraArguments {
   V(JSConstructStubApi, BUILTIN, UNINITIALIZED) \
   V(JSEntryTrampoline, BUILTIN, UNINITIALIZED) \
   V(JSConstructEntryTrampoline, BUILTIN, UNINITIALIZED) \
+  V(LazyCompile, BUILTIN, UNINITIALIZED) \
                                           \
   V(LoadIC_Miss, BUILTIN, UNINITIALIZED) \
   V(KeyedLoadIC_Miss, BUILTIN, UNINITIALIZED) \
@@ -249,6 +250,7 @@ class Builtins : public AllStatic {
   static void Generate_JSConstructStubApi(MacroAssembler* masm);
   static void Generate_JSEntryTrampoline(MacroAssembler* masm);
   static void Generate_JSConstructEntryTrampoline(MacroAssembler* masm);
+  static void Generate_LazyCompile(MacroAssembler* masm);
   static void Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm);
   static void Generate_FunctionCall(MacroAssembler* masm);
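The single V(LazyCompile, BUILTIN, UNINITIALIZED) entry added above is essentially all the registration the new builtin needs, because builtins.h expands one list macro into both the builtin enum and the Generate_* declarations. A generic sketch of that X-macro pattern (illustrative names only, not V8's actual macros):

    // One master list; each expansion site supplies its own V.
    #define MY_BUILTIN_LIST(V) \
      V(JSEntryTrampoline)     \
      V(LazyCompile)

    // Expansion 1: an enum with one id per builtin.
    enum BuiltinId {
    #define DEFINE_ID(name) k##name,
      MY_BUILTIN_LIST(DEFINE_ID)
    #undef DEFINE_ID
      kBuiltinCount
    };

    // Expansion 2: one code-generator declaration per builtin.
    #define DECLARE_GENERATOR(name) void Generate_##name();
    MY_BUILTIN_LIST(DECLARE_GENERATOR)
    #undef DECLARE_GENERATOR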
@@ -494,7 +494,7 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
   // Generate code
   Handle<Code> code;
   if (FLAG_lazy && allow_lazy) {
-    code = ComputeLazyCompile(literal->num_parameters());
+    code = Handle<Code>(Builtins::builtin(Builtins::LazyCompile));
   } else {
     // The bodies of function literals have not yet been visited by
     // the AST optimizer/analyzer.
@@ -819,11 +819,6 @@ OptimizedObjectForAddingMultipleProperties(Handle<JSObject> object,
 }


-Handle<Code> ComputeLazyCompile(int argc) {
-  CALL_HEAP_FUNCTION(StubCache::ComputeLazyCompile(argc), Code);
-}
-
-
 OptimizedObjectForAddingMultipleProperties::
 ~OptimizedObjectForAddingMultipleProperties() {
   // Reoptimize the object to allow fast property access.
@@ -353,9 +353,6 @@ bool CompileLazyInLoop(Handle<JSFunction> function,
                        Handle<Object> receiver,
                        ClearExceptionFlag flag);

-// Returns the lazy compilation stub for argc arguments.
-Handle<Code> ComputeLazyCompile(int argc);
-
 class NoHandleAllocation BASE_EMBEDDED {
  public:
 #ifndef DEBUG
@@ -2504,8 +2504,7 @@ static void FlushCodeForFunction(JSFunction* function) {
   if (CodeIsActive(shared_info->code())) return;

   // Compute the lazy compilable version of the code.
-  HandleScope scope;
-  Code* code = *ComputeLazyCompile(shared_info->length());
+  Code* code = Builtins::builtin(Builtins::LazyCompile);
   shared_info->set_code(code);
   function->set_code(code);
 }
@@ -429,6 +429,26 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
 }


+void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
+  // Enter an internal frame.
+  __ EnterInternalFrame();
+
+  // Push a copy of the function onto the stack.
+  __ push(edi);
+
+  __ push(edi);  // Function is also the parameter to the runtime call.
+  __ CallRuntime(Runtime::kLazyCompile, 1);
+  __ pop(edi);
+
+  // Tear down temporary frame.
+  __ LeaveInternalFrame();
+
+  // Do a tail-call of the compiled function.
+  __ lea(ecx, FieldOperand(eax, Code::kHeaderSize));
+  __ jmp(Operand(ecx));
+}
+
+
 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
   // 1. Make sure we have at least one argument.
   { Label done;
@@ -1255,30 +1255,6 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
 }


-// TODO(1241006): Avoid having lazy compile stubs specialized by the
-// number of arguments. It is not needed anymore.
-Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
-  // Enter an internal frame.
-  __ EnterInternalFrame();
-
-  // Push a copy of the function onto the stack.
-  __ push(edi);
-
-  __ push(edi);  // function is also the parameter to the runtime call
-  __ CallRuntime(Runtime::kLazyCompile, 1);
-  __ pop(edi);
-
-  // Tear down temporary frame.
-  __ LeaveInternalFrame();
-
-  // Do a tail-call of the compiled function.
-  __ lea(ecx, FieldOperand(eax, Code::kHeaderSize));
-  __ jmp(Operand(ecx));
-
-  return GetCodeWithFlags(flags, "LazyCompileStub");
-}
-
-
 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
   if (kind_ == Code::KEYED_CALL_IC) {
     __ cmp(Operand(ecx), Immediate(Handle<String>(name)));
@@ -2661,8 +2661,7 @@ void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
 bool SharedFunctionInfo::is_compiled() {
-  // TODO(1242782): Create a code kind for uncompiled code.
-  return code()->kind() != Code::STUB;
+  return code() != Builtins::builtin(Builtins::LazyCompile);
 }
@@ -2773,7 +2772,7 @@ bool JSFunction::should_have_prototype() {
 bool JSFunction::is_compiled() {
-  return code()->kind() != Code::STUB;
+  return code() != Builtins::builtin(Builtins::LazyCompile);
 }
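A side effect visible in the two hunks above: with exactly one shared LazyCompile builtin, "is this function compiled?" becomes a plain pointer comparison against that builtin instead of an inspection of the code kind. Schematically (V8-internal types, hypothetical helper name, not standalone code):

    // A function is compiled iff its code is not the shared lazy-compile
    // builtin that every not-yet-compiled function starts out with.
    static bool IsCompiled(JSFunction* function) {
      return function->code() != Builtins::builtin(Builtins::LazyCompile);
    }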
@@ -789,23 +789,6 @@ Object* StubCache::ComputeCallDebugPrepareStepIn(int argc, Code::Kind kind) {
 #endif


-Object* StubCache::ComputeLazyCompile(int argc) {
-  Code::Flags flags =
-      Code::ComputeFlags(Code::STUB, NOT_IN_LOOP, UNINITIALIZED, NORMAL, argc);
-  Object* probe = ProbeCache(flags);
-  if (!probe->IsUndefined()) return probe;
-  StubCompiler compiler;
-  Object* result = FillCache(compiler.CompileLazyCompile(flags));
-  if (result->IsCode()) {
-    Code* code = Code::cast(result);
-    USE(code);
-    PROFILE(CodeCreateEvent(Logger::LAZY_COMPILE_TAG,
-                            code, code->arguments_count()));
-  }
-  return result;
-}
-
-
 void StubCache::Clear() {
   for (int i = 0; i < kPrimaryTableSize; i++) {
     primary_[i].key = Heap::empty_string();
@@ -210,8 +210,6 @@ class StubCache : public AllStatic {
   static Object* ComputeCallDebugPrepareStepIn(int argc, Code::Kind kind);
 #endif

-  static Object* ComputeLazyCompile(int argc);
-
   // Update cache for entry hash(name, map).
   static Code* Set(String* name, Map* map, Code* code);
@@ -357,7 +355,6 @@ class StubCompiler BASE_EMBEDDED {
   Object* CompileCallDebugBreak(Code::Flags flags);
   Object* CompileCallDebugPrepareStepIn(Code::Flags flags);
 #endif
-  Object* CompileLazyCompile(Code::Flags flags);

   // Static functions for generating parts of stubs.
   static void GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
@@ -1291,6 +1291,26 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
   Generate_JSEntryTrampolineHelper(masm, true);
 }

+
+void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
+  // Enter an internal frame.
+  __ EnterInternalFrame();
+
+  // Push a copy of the function onto the stack.
+  __ push(rdi);
+
+  __ push(rdi);  // Function is also the parameter to the runtime call.
+  __ CallRuntime(Runtime::kLazyCompile, 1);
+  __ pop(rdi);
+
+  // Tear down temporary frame.
+  __ LeaveInternalFrame();
+
+  // Do a tail-call of the compiled function.
+  __ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
+  __ jmp(rcx);
+}
+
 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64
@@ -2039,30 +2039,6 @@ Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
 }


-// TODO(1241006): Avoid having lazy compile stubs specialized by the
-// number of arguments. It is not needed anymore.
-Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
-  // Enter an internal frame.
-  __ EnterInternalFrame();
-
-  // Push a copy of the function onto the stack.
-  __ push(rdi);
-
-  __ push(rdi);  // function is also the parameter to the runtime call
-  __ CallRuntime(Runtime::kLazyCompile, 1);
-  __ pop(rdi);
-
-  // Tear down temporary frame.
-  __ LeaveInternalFrame();
-
-  // Do a tail-call of the compiled function.
-  __ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
-  __ jmp(rcx);
-
-  return GetCodeWithFlags(flags, "LazyCompileStub");
-}
-
-
 void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                            JSObject* interceptor_holder,
                                            LookupResult* lookup,